From 2ab46cbc36f5d4adbf3ad789352310decc4912fa Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 11 Sep 2025 02:00:27 +0000
Subject: [PATCH 1/3] feat: A new service `BusinessGlossaryService` is added

feat: A new message `Glossary` is added
feat: A new resource_definition `dataplex.googleapis.com/Glossary` is added
feat: A new message `GlossaryCategory` is added
feat: A new resource_definition `dataplex.googleapis.com/GlossaryCategory` is added
feat: A new message `GlossaryTerm` is added
feat: A new resource_definition `dataplex.googleapis.com/GlossaryTerm` is added
feat: A new message `CreateGlossaryRequest` is added
feat: A new message `UpdateGlossaryRequest` is added
feat: A new message `DeleteGlossaryRequest` is added
feat: A new message `GetGlossaryRequest` is added
feat: A new message `ListGlossariesRequest` is added
feat: A new message `ListGlossariesResponse` is added
feat: A new message `CreateGlossaryCategoryRequest` is added
feat: A new message `UpdateGlossaryCategoryRequest` is added
feat: A new message `DeleteGlossaryCategoryRequest` is added
feat: A new message `GetGlossaryCategoryRequest` is added
feat: A new message `ListGlossaryCategoriesRequest` is added
feat: A new message `ListGlossaryCategoriesResponse` is added
feat: A new message `CreateGlossaryTermRequest` is added
feat: A new message `UpdateGlossaryTermRequest` is added
feat: A new message `DeleteGlossaryTermRequest` is added
feat: A new message `GetGlossaryTermRequest` is added
feat: A new message `ListGlossaryTermsRequest` is added
feat: A new message `ListGlossaryTermsResponse` is added
feat: A new method `CreateEntryLink` is added to service `CatalogService`
feat: A new method `DeleteEntryLink` is added to service `CatalogService`
feat: A new method `GetEntryLink` is added to service `CatalogService`
feat: A new field `entry_link` is added to message `.google.cloud.dataplex.v1.ImportItem`
feat: A new field `deleted_entry_links` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new field `created_entry_links` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new field `unchanged_entry_links` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new field `glossaries` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new field `entry_link_types` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new field `referenced_entry_scopes` is added to message `.google.cloud.dataplex.v1.MetadataJob`
feat: A new message `EntryLink` is added
feat: A new resource_definition `dataplex.googleapis.com/EntryLink` is added
feat: A new message `CreateEntryLinkRequest` is added
feat: A new message `DeleteEntryLinkRequest` is added
feat: A new message `GetEntryLinkRequest` is added
feat: A new field `project` is added to message `.google.cloud.dataplex.v1.DataDiscoverySpec`
feat: A new field `catalog_publishing_enabled` is added to message `.google.cloud.dataplex.v1.DataQualitySpec`
feat: A new field `catalog_publishing_status` is added to message `.google.cloud.dataplex.v1.DataQualityResult`
feat: A new field `passed` is added to message `.google.cloud.dataplex.v1.DataQualityColumnResult`
feat: A new field `dimensions` is added to message `.google.cloud.dataplex.v1.DataQualityColumnResult`
feat: A new message `DataScanCatalogPublishingStatus` is added
feat: A new field `catalog_publishing_status` is added to message `.google.cloud.dataplex.v1.DataScanEvent`
docs: A comment for field `image_version` in message `.google.cloud.dataplex.v1.Environment` is changed
docs: A comment for service `CatalogService` is changed
docs: A comment for method `CreateMetadataJob` in service `CatalogService` is changed
docs: A comment for message `AspectType` is changed
docs: A comment for field `alternate_use_permission` in message `.google.cloud.dataplex.v1.AspectType` is changed
docs: A comment for field `type` in message `.google.cloud.dataplex.v1.AspectType` is changed
docs: A comment for field `alternate_use_permission` in message `.google.cloud.dataplex.v1.EntryType` is changed
docs: A comment for field `create_time` in message `.google.cloud.dataplex.v1.Entry` is changed
docs: A comment for field `update_time` in message `.google.cloud.dataplex.v1.Entry` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.CreateEntryGroupRequest` is changed
docs: A comment for message `DeleteEntryTypeRequest` is changed
docs: A comment for message `DeleteAspectTypeRequest` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.SearchEntriesRequest` is changed
docs: A comment for field `query` in message `.google.cloud.dataplex.v1.SearchEntriesRequest` is changed
docs: A comment for field `order_by` in message `.google.cloud.dataplex.v1.SearchEntriesRequest` is changed
docs: A comment for field `update_mask` in message `.google.cloud.dataplex.v1.ImportItem` is changed
docs: A comment for field `aspect_keys` in message `.google.cloud.dataplex.v1.ImportItem` is changed
docs: A comment for enum value `FULL` in enum `SyncMode` is changed
docs: A comment for field `output_path` in message `.google.cloud.dataplex.v1.MetadataJob` is changed
docs: A comment for service `CmekService` is changed
docs: A comment for service `ContentService` is changed
docs: A comment for field `results_table` in message `.google.cloud.dataplex.v1.DataProfileSpec` is changed
docs: A comment for field `row_filter` in message `.google.cloud.dataplex.v1.DataProfileSpec` is changed
docs: A comment for field `min_length` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `max_length` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `average_length` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `average` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `standard_deviation` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `min` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `quartiles` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `max` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `average` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `standard_deviation` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `min` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `quartiles` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `max` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `value` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `count` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `ratio` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `null_ratio` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `distinct_ratio` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `top_n_values` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `type` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `mode` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `profile` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `fields` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `row_count` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `profile` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `scanned_data` in message `.google.cloud.dataplex.v1.DataProfileResult` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.DataQualityDimension` is changed
docs: A comment for field `dimension` in message `.google.cloud.dataplex.v1.DataQualityRule` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.ListDataTaxonomiesRequest` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.CreateDataScanRequest` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.DeleteDataScanRequest` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.GetDataScanRequest` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.ListDataScansRequest` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.RunDataScanRequest` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.GetDataScanJobRequest` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.ListDataScanJobsRequest` is changed
docs: A comment for field `rule` in message `.google.cloud.dataplex.v1.GenerateDataQualityRulesResponse` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.DataScan` is changed
docs: A comment for field `name` in message `.google.cloud.dataplex.v1.DataScanJob` is changed
docs: A comment for enum value `TASK_CONFIG` in enum `ExecutionTrigger` is changed
docs: A comment for enum value `CREATED` in enum `State` is changed
docs: A comment for field `user_managed` in message `.google.cloud.dataplex.v1.Schema` is changed
docs: A comment for field `entity` in message `.google.cloud.dataplex.v1.DataSource` is changed
docs: A comment for field `resource` in message `.google.cloud.dataplex.v1.DataSource` is changed
docs: A comment for field `field` in message `.google.cloud.dataplex.v1.ScannedData` is changed
docs: A comment for field `start` in message `.google.cloud.dataplex.v1.ScannedData` is changed
docs: A comment for field `end` in message `.google.cloud.dataplex.v1.ScannedData` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.CreateLakeRequest` is changed
docs: A comment for field `parent` in message `.google.cloud.dataplex.v1.ListLakesRequest` is changed
docs: A comment for enum value `ABORTED` in enum `State` is changed
docs: A comment for enum value `TASK_CONFIG` in enum `Trigger` is changed

PiperOrigin-RevId: 805560354
Source-Link: https://github.com/googleapis/googleapis/commit/9978d435df71bb16ecadd1e4421640748d0bf533
Source-Link: https://github.com/googleapis/googleapis-gen/commit/ff27b6c3b66d1bf0856be310aa560d44743a2431
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwbGV4Ly5Pd2xCb3QueWFtbCIsImgiOiJmZjI3YjZjM2I2NmQxYmYwODU2YmUzMTBhYTU2MGQ0NDc0M2EyNDMxIn0=
---
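Reviewer notes (text below the `---` separator is not part of the commit
message): a minimal, hypothetical usage sketch of the new
`BusinessGlossaryService` surface added by this patch. It assumes the
standard GAPIC client shape that the generated `client.py` and samples
follow; the project/location resource names, IDs, and `display_name` value
are placeholders, and the `CreateGlossaryRequest` field names
(`parent`, `glossary_id`, `glossary`) follow the usual AIP-133 create
pattern rather than being confirmed here.

    # Hypothetical sketch -- resource names and IDs are placeholders.
    from google.cloud import dataplex_v1

    client = dataplex_v1.BusinessGlossaryServiceClient()

    # create_glossary returns a long-running operation in the generated
    # surface; result() blocks until the Glossary resource is ready.
    operation = client.create_glossary(
        request=dataplex_v1.CreateGlossaryRequest(
            parent="projects/my-project/locations/us-central1",
            glossary_id="my-glossary",
            glossary=dataplex_v1.Glossary(display_name="My Glossary"),
        )
    )
    glossary = operation.result()

    # list_glossaries returns a pager that fetches further pages on demand.
    for g in client.list_glossaries(
        request=dataplex_v1.ListGlossariesRequest(
            parent="projects/my-project/locations/us-central1",
        )
    ):
        print(g.name)

The new CatalogService entry-link methods follow the same request/response
pattern; for example, GetEntryLink is a plain unary call (the entryLinks
resource-name pattern below is an assumption, not confirmed by this patch):

    # Hypothetical sketch -- the entryLinks name pattern is assumed.
    catalog = dataplex_v1.CatalogServiceClient()
    link = catalog.get_entry_link(
        request=dataplex_v1.GetEntryLinkRequest(
            name="projects/my-project/locations/us-central1/entryGroups/my-group/entryLinks/my-link",
        )
    )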
 .../google-cloud-dataplex/v1/.coveragerc | 13 +
 .../google-cloud-dataplex/v1/.flake8 | 34 +
 .../google-cloud-dataplex/v1/LICENSE | 202 +
 .../google-cloud-dataplex/v1/MANIFEST.in | 20 +
 .../google-cloud-dataplex/v1/README.rst | 143 +
 .../v1/docs/_static/custom.css | 20 +
 .../v1/docs/_templates/layout.html | 50 +
 .../google-cloud-dataplex/v1/docs/conf.py | 385 +
 .../dataplex_v1/business_glossary_service.rst | 10 +
 .../v1/docs/dataplex_v1/catalog_service.rst | 10 +
 .../v1/docs/dataplex_v1/cmek_service.rst | 10 +
 .../v1/docs/dataplex_v1/content_service.rst | 10 +
 .../v1/docs/dataplex_v1/data_scan_service.rst | 10 +
 .../dataplex_v1/data_taxonomy_service.rst | 10 +
 .../v1/docs/dataplex_v1/dataplex_service.rst | 10 +
 .../v1/docs/dataplex_v1/metadata_service.rst | 10 +
 .../v1/docs/dataplex_v1/services_.rst | 13 +
 .../v1/docs/dataplex_v1/types_.rst | 6 +
 .../google-cloud-dataplex/v1/docs/index.rst | 10 +
 .../v1/docs/multiprocessing.rst | 7 +
 .../v1/google/cloud/dataplex/__init__.py | 485 +
 .../v1/google/cloud/dataplex/gapic_version.py | 16 +
 .../v1/google/cloud/dataplex/py.typed | 2 +
 .../v1/google/cloud/dataplex_v1/__init__.py | 486 +
 .../cloud/dataplex_v1/gapic_metadata.json | 2006 ++
 .../google/cloud/dataplex_v1/gapic_version.py | 16 +
 .../v1/google/cloud/dataplex_v1/py.typed | 2 +
 .../cloud/dataplex_v1/services/__init__.py | 15 +
 .../business_glossary_service/__init__.py | 22 +
 .../business_glossary_service/async_client.py | 2395 ++
 .../business_glossary_service/client.py | 2770 ++
 .../business_glossary_service/pagers.py | 444 +
 .../transports/README.rst | 9 +
 .../transports/__init__.py | 38 +
 .../transports/base.py | 448 +
 .../transports/grpc.py | 852 +
 .../transports/grpc_asyncio.py | 973 +
 .../transports/rest.py | 3535 ++
 .../transports/rest_base.py | 883 +
 .../services/catalog_service/__init__.py | 22 +
 .../services/catalog_service/async_client.py | 4109 +++
 .../services/catalog_service/client.py | 4507 +++
 .../services/catalog_service/pagers.py | 861 +
 .../catalog_service/transports/README.rst | 9 +
 .../catalog_service/transports/__init__.py | 38 +
 .../catalog_service/transports/base.py | 754 +
 .../catalog_service/transports/grpc.py | 1220 +
 .../transports/grpc_asyncio.py | 1521 +
 .../catalog_service/transports/rest.py | 6010 ++++
 .../catalog_service/transports/rest_base.py | 1451 +
 .../services/cmek_service/__init__.py | 22 +
 .../services/cmek_service/async_client.py | 1216 +
 .../services/cmek_service/client.py | 1592 +
 .../services/cmek_service/pagers.py | 166 +
 .../cmek_service/transports/README.rst | 9 +
 .../cmek_service/transports/__init__.py | 38 +
 .../services/cmek_service/transports/base.py | 307 +
 .../services/cmek_service/transports/grpc.py | 581 +
 .../cmek_service/transports/grpc_asyncio.py | 652 +
 .../services/cmek_service/transports/rest.py | 1907 ++
 .../cmek_service/transports/rest_base.py | 472 +
 .../services/content_service/__init__.py | 22 +
 .../services/content_service/async_client.py | 1497 +
 .../services/content_service/client.py | 1878 +
 .../services/content_service/pagers.py | 167 +
 .../content_service/transports/README.rst | 9 +
 .../content_service/transports/__init__.py | 38 +
 .../content_service/transports/base.py | 382 +
 .../content_service/transports/grpc.py | 664 +
 .../transports/grpc_asyncio.py | 786 +
 .../content_service/transports/rest.py | 2446 ++
 .../content_service/transports/rest_base.py | 644 +
 .../services/data_scan_service/__init__.py | 22 +
 .../data_scan_service/async_client.py | 1755 +
 .../services/data_scan_service/client.py | 2154 ++
 .../services/data_scan_service/pagers.py | 305 +
 .../data_scan_service/transports/README.rst | 9 +
 .../data_scan_service/transports/__init__.py | 38 +
 .../data_scan_service/transports/base.py | 363 +
 .../data_scan_service/transports/grpc.py | 691 +
 .../transports/grpc_asyncio.py | 782 +
 .../data_scan_service/transports/rest.py | 2620 ++
 .../data_scan_service/transports/rest_base.py | 645 +
 .../data_taxonomy_service/__init__.py | 22 +
 .../data_taxonomy_service/async_client.py | 2543 ++
 .../services/data_taxonomy_service/client.py | 2917 ++
 .../services/data_taxonomy_service/pagers.py | 444 +
 .../transports/README.rst | 9 +
 .../transports/__init__.py | 38 +
 .../data_taxonomy_service/transports/base.py | 448 +
 .../data_taxonomy_service/transports/grpc.py | 849 +
 .../transports/grpc_asyncio.py | 970 +
 .../data_taxonomy_service/transports/rest.py | 3660 ++
 .../transports/rest_base.py | 883 +
 .../services/dataplex_service/__init__.py | 22 +
 .../services/dataplex_service/async_client.py | 4716 +++
 .../services/dataplex_service/client.py | 5118 +++
 .../services/dataplex_service/pagers.py | 1420 +
 .../dataplex_service/transports/README.rst | 9 +
 .../dataplex_service/transports/__init__.py | 38 +
 .../dataplex_service/transports/base.py | 838 +
 .../dataplex_service/transports/grpc.py | 1323 +
 .../transports/grpc_asyncio.py | 1669 +
 .../dataplex_service/transports/rest.py | 6707 ++++
 .../dataplex_service/transports/rest_base.py | 1612 +
 .../services/metadata_service/__init__.py | 22 +
 .../services/metadata_service/async_client.py | 1571 +
 .../services/metadata_service/client.py | 1953 ++
 .../services/metadata_service/pagers.py | 305 +
 .../metadata_service/transports/README.rst | 9 +
 .../metadata_service/transports/__init__.py | 38 +
 .../metadata_service/transports/base.py | 394 +
 .../metadata_service/transports/grpc.py | 669 +
 .../transports/grpc_asyncio.py | 796 +
 .../metadata_service/transports/rest.py | 2403 ++
 .../metadata_service/transports/rest_base.py | 631 +
 .../cloud/dataplex_v1/types/__init__.py | 484 +
 .../google/cloud/dataplex_v1/types/analyze.py | 492 +
 .../dataplex_v1/types/business_glossary.py | 876 +
 .../google/cloud/dataplex_v1/types/catalog.py | 3079 ++
 .../v1/google/cloud/dataplex_v1/types/cmek.py | 356 +
 .../google/cloud/dataplex_v1/types/content.py | 227 +
 .../cloud/dataplex_v1/types/data_discovery.py | 364 +
 .../cloud/dataplex_v1/types/data_profile.py | 546 +
 .../cloud/dataplex_v1/types/data_quality.py | 962 +
 .../cloud/dataplex_v1/types/data_taxonomy.py | 972 +
 .../cloud/dataplex_v1/types/datascans.py | 931 +
 .../dataplex_v1/types/datascans_common.py | 62 +
 .../v1/google/cloud/dataplex_v1/types/logs.py | 1467 +
 .../cloud/dataplex_v1/types/metadata_.py | 1182 +
 .../cloud/dataplex_v1/types/processing.py | 192 +
 .../cloud/dataplex_v1/types/resources.py | 1436 +
 .../cloud/dataplex_v1/types/security.py | 90 +
 .../google/cloud/dataplex_v1/types/service.py | 1395 +
 .../google/cloud/dataplex_v1/types/tasks.py | 753 +
 .../google-cloud-dataplex/v1/mypy.ini | 3 +
 .../google-cloud-dataplex/v1/noxfile.py | 591 +
 ..._glossary_service_create_glossary_async.py | 57 +
 ..._service_create_glossary_category_async.py | 57 +
 ...y_service_create_glossary_category_sync.py | 57 +
 ...s_glossary_service_create_glossary_sync.py | 57 +
 ...sary_service_create_glossary_term_async.py | 57 +
 ...ssary_service_create_glossary_term_sync.py | 57 +
 ..._glossary_service_delete_glossary_async.py | 56 +
 ..._service_delete_glossary_category_async.py | 50 +
 ...y_service_delete_glossary_category_sync.py | 50 +
 ...s_glossary_service_delete_glossary_sync.py | 56 +
 ...sary_service_delete_glossary_term_async.py | 50 +
 ...ssary_service_delete_glossary_term_sync.py | 50 +
 ...ess_glossary_service_get_glossary_async.py | 52 +
 ...ary_service_get_glossary_category_async.py | 52 +
 ...sary_service_get_glossary_category_sync.py | 52 +
 ...ness_glossary_service_get_glossary_sync.py | 52 +
 ...lossary_service_get_glossary_term_async.py | 52 +
 ...glossary_service_get_glossary_term_sync.py | 52 +
 ..._glossary_service_list_glossaries_async.py | 53 +
 ...s_glossary_service_list_glossaries_sync.py | 53 +
 ..._service_list_glossary_categories_async.py | 53 +
 ...y_service_list_glossary_categories_sync.py | 53 +
 ...ssary_service_list_glossary_terms_async.py | 53 +
 ...ossary_service_list_glossary_terms_sync.py | 53 +
 ..._glossary_service_update_glossary_async.py | 55 +
 ..._service_update_glossary_category_async.py | 55 +
 ...y_service_update_glossary_category_sync.py | 55 +
 ...s_glossary_service_update_glossary_sync.py | 55 +
 ...sary_service_update_glossary_term_async.py | 55 +
 ...ssary_service_update_glossary_term_sync.py | 55 +
 ...talog_service_cancel_metadata_job_async.py | 50 +
 ...atalog_service_cancel_metadata_job_sync.py | 50 +
 ...atalog_service_create_aspect_type_async.py | 62 +
 ...catalog_service_create_aspect_type_sync.py | 62 +
 ...ated_catalog_service_create_entry_async.py | 57 +
 ...atalog_service_create_entry_group_async.py | 57 +
 ...catalog_service_create_entry_group_sync.py | 57 +
 ...catalog_service_create_entry_link_async.py | 59 +
 ..._catalog_service_create_entry_link_sync.py | 59 +
 ...rated_catalog_service_create_entry_sync.py | 57 +
 ...catalog_service_create_entry_type_async.py | 57 +
 ..._catalog_service_create_entry_type_sync.py | 57 +
 ...talog_service_create_metadata_job_async.py | 64 +
 ...atalog_service_create_metadata_job_sync.py | 64 +
 ...atalog_service_delete_aspect_type_async.py | 56 +
 ...catalog_service_delete_aspect_type_sync.py | 56 +
 ...ated_catalog_service_delete_entry_async.py | 52 +
 ...atalog_service_delete_entry_group_async.py | 56 +
 ...catalog_service_delete_entry_group_sync.py | 56 +
 ...catalog_service_delete_entry_link_async.py | 52 +
 ..._catalog_service_delete_entry_link_sync.py | 52 +
 ...rated_catalog_service_delete_entry_sync.py | 52 +
 ...catalog_service_delete_entry_type_async.py | 56 +
 ..._catalog_service_delete_entry_type_sync.py | 56 +
 ...d_catalog_service_get_aspect_type_async.py | 52 +
 ...ed_catalog_service_get_aspect_type_sync.py | 52 +
 ...nerated_catalog_service_get_entry_async.py | 52 +
 ...d_catalog_service_get_entry_group_async.py | 52 +
 ...ed_catalog_service_get_entry_group_sync.py | 52 +
 ...ed_catalog_service_get_entry_link_async.py | 52 +
 ...ted_catalog_service_get_entry_link_sync.py | 52 +
 ...enerated_catalog_service_get_entry_sync.py | 52 +
 ...ed_catalog_service_get_entry_type_async.py | 52 +
 ...ted_catalog_service_get_entry_type_sync.py | 52 +
 ..._catalog_service_get_metadata_job_async.py | 52 +
 ...d_catalog_service_get_metadata_job_sync.py | 52 +
 ...catalog_service_list_aspect_types_async.py | 53 +
 ..._catalog_service_list_aspect_types_sync.py | 53 +
 ...ated_catalog_service_list_entries_async.py | 53 +
 ...rated_catalog_service_list_entries_sync.py | 53 +
 ...catalog_service_list_entry_groups_async.py | 53 +
 ..._catalog_service_list_entry_groups_sync.py | 53 +
 ..._catalog_service_list_entry_types_async.py | 53 +
 ...d_catalog_service_list_entry_types_sync.py | 53 +
 ...atalog_service_list_metadata_jobs_async.py | 53 +
 ...catalog_service_list_metadata_jobs_sync.py | 53 +
 ...ated_catalog_service_lookup_entry_async.py | 53 +
 ...rated_catalog_service_lookup_entry_sync.py | 53 +
 ...ed_catalog_service_search_entries_async.py | 54 +
 ...ted_catalog_service_search_entries_sync.py | 54 +
 ...atalog_service_update_aspect_type_async.py | 60 +
 ...catalog_service_update_aspect_type_sync.py | 60 +
 ...ated_catalog_service_update_entry_async.py | 55 +
 ...atalog_service_update_entry_group_async.py | 55 +
 ...catalog_service_update_entry_group_sync.py | 55 +
 ...rated_catalog_service_update_entry_sync.py | 55 +
 ...catalog_service_update_entry_type_async.py | 55 +
 ..._catalog_service_update_entry_type_sync.py | 55 +
 ..._service_create_encryption_config_async.py | 57 +
 ...k_service_create_encryption_config_sync.py | 57 +
 ..._service_delete_encryption_config_async.py | 56 +
 ...k_service_delete_encryption_config_sync.py | 56 +
 ...mek_service_get_encryption_config_async.py | 52 +
 ...cmek_service_get_encryption_config_sync.py | 52 +
 ...k_service_list_encryption_configs_async.py | 53 +
 ...ek_service_list_encryption_configs_sync.py | 53 +
 ..._service_update_encryption_config_async.py | 55 +
 ...k_service_update_encryption_config_sync.py | 55 +
 ...ed_content_service_create_content_async.py | 58 +
 ...ted_content_service_create_content_sync.py | 58 +
 ...ed_content_service_delete_content_async.py | 50 +
 ...ted_content_service_delete_content_sync.py | 50 +
 ...rated_content_service_get_content_async.py | 52 +
 ...erated_content_service_get_content_sync.py | 52 +
 ...ed_content_service_get_iam_policy_async.py | 53 +
 ...ted_content_service_get_iam_policy_sync.py | 53 +
 ...ated_content_service_list_content_async.py | 53 +
 ...rated_content_service_list_content_sync.py | 53 +
 ...ed_content_service_set_iam_policy_async.py | 53 +
 ...ted_content_service_set_iam_policy_sync.py | 53 +
 ...tent_service_test_iam_permissions_async.py | 54 +
 ...ntent_service_test_iam_permissions_sync.py | 54 +
 ...ed_content_service_update_content_async.py | 57 +
 ...ted_content_service_update_content_sync.py | 57 +
 ...ata_scan_service_create_data_scan_async.py | 62 +
 ...data_scan_service_create_data_scan_sync.py | 62 +
 ...ata_scan_service_delete_data_scan_async.py | 56 +
 ...data_scan_service_delete_data_scan_sync.py | 56 +
 ...rvice_generate_data_quality_rules_async.py | 52 +
 ...ervice_generate_data_quality_rules_sync.py | 52 +
 ...d_data_scan_service_get_data_scan_async.py | 52 +
 ...ta_scan_service_get_data_scan_job_async.py | 52 +
 ...ata_scan_service_get_data_scan_job_sync.py | 52 +
 ...ed_data_scan_service_get_data_scan_sync.py | 52 +
 ..._scan_service_list_data_scan_jobs_async.py | 53 +
 ...a_scan_service_list_data_scan_jobs_sync.py | 53 +
 ...data_scan_service_list_data_scans_async.py | 53 +
 ..._data_scan_service_list_data_scans_sync.py | 53 +
 ...d_data_scan_service_run_data_scan_async.py | 52 +
 ...ed_data_scan_service_run_data_scan_sync.py | 52 +
 ...ata_scan_service_update_data_scan_async.py | 60 +
 ...data_scan_service_update_data_scan_sync.py | 60 +
 ...omy_service_create_data_attribute_async.py | 57 +
 ...ice_create_data_attribute_binding_async.py | 61 +
 ...vice_create_data_attribute_binding_sync.py | 61 +
 ...nomy_service_create_data_attribute_sync.py | 57 +
 ...nomy_service_create_data_taxonomy_async.py | 57 +
 ...onomy_service_create_data_taxonomy_sync.py | 57 +
 ...omy_service_delete_data_attribute_async.py | 56 +
 ...ice_delete_data_attribute_binding_async.py | 57 +
 ...vice_delete_data_attribute_binding_sync.py | 57 +
 ...nomy_service_delete_data_attribute_sync.py | 56 +
 ...nomy_service_delete_data_taxonomy_async.py | 56 +
 ...onomy_service_delete_data_taxonomy_sync.py | 56 +
 ...xonomy_service_get_data_attribute_async.py | 52 +
 ...ervice_get_data_attribute_binding_async.py | 52 +
 ...service_get_data_attribute_binding_sync.py | 52 +
 ...axonomy_service_get_data_attribute_sync.py | 52 +
 ...axonomy_service_get_data_taxonomy_async.py | 52 +
 ...taxonomy_service_get_data_taxonomy_sync.py | 52 +
 ...vice_list_data_attribute_bindings_async.py | 53 +
 ...rvice_list_data_attribute_bindings_sync.py | 53 +
 ...nomy_service_list_data_attributes_async.py | 53 +
 ...onomy_service_list_data_attributes_sync.py | 53 +
 ...nomy_service_list_data_taxonomies_async.py | 53 +
 ...onomy_service_list_data_taxonomies_sync.py | 53 +
 ...omy_service_update_data_attribute_async.py | 55 +
 ...ice_update_data_attribute_binding_async.py | 59 +
 ...vice_update_data_attribute_binding_sync.py | 59 +
 ...nomy_service_update_data_attribute_sync.py | 55 +
 ...nomy_service_update_data_taxonomy_async.py | 55 +
 ...onomy_service_update_data_taxonomy_sync.py | 55 +
 ...rated_dataplex_service_cancel_job_async.py | 50 +
 ...erated_dataplex_service_cancel_job_sync.py | 50 +
 ...ted_dataplex_service_create_asset_async.py | 61 +
 ...ated_dataplex_service_create_asset_sync.py | 61 +
 ...taplex_service_create_environment_async.py | 61 +
 ...ataplex_service_create_environment_sync.py | 61 +
 ...ated_dataplex_service_create_lake_async.py | 57 +
 ...rated_dataplex_service_create_lake_sync.py | 57 +
 ...ated_dataplex_service_create_task_async.py | 64 +
 ...rated_dataplex_service_create_task_sync.py | 64 +
 ...ated_dataplex_service_create_zone_async.py | 62 +
 ...rated_dataplex_service_create_zone_sync.py | 62 +
 ...ted_dataplex_service_delete_asset_async.py | 56 +
 ...ated_dataplex_service_delete_asset_sync.py | 56 +
 ...taplex_service_delete_environment_async.py | 56 +
 ...ataplex_service_delete_environment_sync.py | 56 +
 ...ated_dataplex_service_delete_lake_async.py | 56 +
 ...rated_dataplex_service_delete_lake_sync.py | 56 +
 ...ated_dataplex_service_delete_task_async.py | 56 +
 ...rated_dataplex_service_delete_task_sync.py | 56 +
 ...ated_dataplex_service_delete_zone_async.py | 56 +
 ...rated_dataplex_service_delete_zone_sync.py | 56 +
 ...erated_dataplex_service_get_asset_async.py | 52 +
 ...nerated_dataplex_service_get_asset_sync.py | 52 +
 ..._dataplex_service_get_environment_async.py | 52 +
 ...d_dataplex_service_get_environment_sync.py | 52 +
 ...enerated_dataplex_service_get_job_async.py | 52 +
 ...generated_dataplex_service_get_job_sync.py | 52 +
 ...nerated_dataplex_service_get_lake_async.py | 52 +
 ...enerated_dataplex_service_get_lake_sync.py | 52 +
 ...nerated_dataplex_service_get_task_async.py | 52 +
 ...enerated_dataplex_service_get_task_sync.py | 52 +
 ...nerated_dataplex_service_get_zone_async.py | 52 +
 ...enerated_dataplex_service_get_zone_sync.py | 52 +
 ...taplex_service_list_asset_actions_async.py | 53 +
 ...ataplex_service_list_asset_actions_sync.py | 53 +
 ...ated_dataplex_service_list_assets_async.py | 53 +
 ...rated_dataplex_service_list_assets_sync.py | 53 +
 ...ataplex_service_list_environments_async.py | 53 +
 ...dataplex_service_list_environments_sync.py | 53 +
 ...erated_dataplex_service_list_jobs_async.py | 53 +
 ...nerated_dataplex_service_list_jobs_sync.py | 53 +
 ...ataplex_service_list_lake_actions_async.py | 53 +
 ...dataplex_service_list_lake_actions_sync.py | 53 +
 ...rated_dataplex_service_list_lakes_async.py | 53 +
 ...erated_dataplex_service_list_lakes_sync.py | 53 +
 ...ed_dataplex_service_list_sessions_async.py | 53 +
 ...ted_dataplex_service_list_sessions_sync.py | 53 +
 ...rated_dataplex_service_list_tasks_async.py | 53 +
 ...erated_dataplex_service_list_tasks_sync.py | 53 +
 ...ataplex_service_list_zone_actions_async.py | 53 +
 ...dataplex_service_list_zone_actions_sync.py | 53 +
 ...rated_dataplex_service_list_zones_async.py | 53 +
 ...erated_dataplex_service_list_zones_sync.py | 53 +
 ...nerated_dataplex_service_run_task_async.py | 52 +
 ...enerated_dataplex_service_run_task_sync.py | 52 +
 ...ted_dataplex_service_update_asset_async.py | 59 +
 ...ated_dataplex_service_update_asset_sync.py | 59 +
 ...taplex_service_update_environment_async.py | 59 +
 ...ataplex_service_update_environment_sync.py | 59 +
 ...ated_dataplex_service_update_lake_async.py | 55 +
 ...rated_dataplex_service_update_lake_sync.py | 55 +
 ...ated_dataplex_service_update_task_async.py | 62 +
 ...rated_dataplex_service_update_task_sync.py | 62 +
 ...ated_dataplex_service_update_zone_async.py | 60 +
 ...rated_dataplex_service_update_zone_sync.py | 60 +
 ...ed_metadata_service_create_entity_async.py | 62 +
 ...ted_metadata_service_create_entity_sync.py | 62 +
 ...metadata_service_create_partition_async.py | 57 +
 ..._metadata_service_create_partition_sync.py | 57 +
 ...ed_metadata_service_delete_entity_async.py | 51 +
 ...ted_metadata_service_delete_entity_sync.py | 51 +
 ...metadata_service_delete_partition_async.py | 50 +
 ..._metadata_service_delete_partition_sync.py | 50 +
 ...rated_metadata_service_get_entity_async.py | 52 +
 ...erated_metadata_service_get_entity_sync.py | 52 +
 ...ed_metadata_service_get_partition_async.py | 52 +
 ...ted_metadata_service_get_partition_sync.py | 52 +
 ...ed_metadata_service_list_entities_async.py | 54 +
 ...ted_metadata_service_list_entities_sync.py | 54 +
 ..._metadata_service_list_partitions_async.py | 53 +
 ...d_metadata_service_list_partitions_sync.py | 53 +
 ...ed_metadata_service_update_entity_async.py | 61 +
 ...ted_metadata_service_update_entity_sync.py | 61 +
 ...pet_metadata_google.cloud.dataplex.v1.json | 20224 +++++++++++
 .../v1/scripts/fixup_dataplex_v1_keywords.py | 298 +
 .../google-cloud-dataplex/v1/setup.py | 99 +
 .../v1/testing/constraints-3.10.txt | 7 +
 .../v1/testing/constraints-3.11.txt | 7 +
 .../v1/testing/constraints-3.12.txt | 7 +
 .../v1/testing/constraints-3.13.txt | 12 +
 .../v1/testing/constraints-3.7.txt | 11 +
 .../v1/testing/constraints-3.8.txt | 7 +
 .../v1/testing/constraints-3.9.txt | 7 +
 .../v1/tests/__init__.py | 16 +
 .../v1/tests/unit/__init__.py | 16 +
 .../v1/tests/unit/gapic/__init__.py | 16 +
 .../tests/unit/gapic/dataplex_v1/__init__.py | 16 +
 .../test_business_glossary_service.py | 14069 ++++++++
 .../gapic/dataplex_v1/test_catalog_service.py | 24562 ++++++++++++++
 .../gapic/dataplex_v1/test_cmek_service.py | 6432 ++++
 .../gapic/dataplex_v1/test_content_service.py | 8115 +++++
 .../dataplex_v1/test_data_scan_service.py | 9435 ++++++
 .../dataplex_v1/test_data_taxonomy_service.py | 14068 ++++++++
 .../dataplex_v1/test_dataplex_service.py | 28263 ++++++++++++++++
 .../dataplex_v1/test_metadata_service.py | 9404 +++++
 405 files changed, 271824 insertions(+)
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.coveragerc
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.flake8
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/LICENSE
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/mypy.ini
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/noxfile.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py
 create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py
 create mode 100644
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py create mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py create mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py
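Every file in the sample listing above follows the same generated snippet shape: one self-contained function per RPC, in sync and async variants. A minimal sketch of what one of these files contains (illustrative only; the resource name below is a placeholder, and this is not the verbatim generated file):

.. code-block:: python

   # Sketch of dataplex_v1_generated_catalog_service_get_entry_sync.py
   from google.cloud import dataplex_v1

   def sample_get_entry():
       # Each generated sample instantiates its service client directly.
       client = dataplex_v1.CatalogServiceClient()

       # Request types mirror the proto messages; the name is a placeholder.
       request = dataplex_v1.GetEntryRequest(
           name="projects/sample-project/locations/us-central1/entryGroups/sample-group/entries/sample-entry",
       )

       response = client.get_entry(request=request)
       print(response)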
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc
new file mode 100644
index 000000000000..8df508b38cbc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+  google/cloud/dataplex/__init__.py
+  google/cloud/dataplex/gapic_version.py
+exclude_lines =
+  # Re-enable the standard pragma
+  pragma: NO COVER
+  # Ignore debug-only repr
+  def __repr__
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 b/owl-bot-staging/google-cloud-dataplex/v1/.flake8
new file mode 100644
index 000000000000..90316de21489
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/.flake8
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+[flake8]
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+# Resolve flake8 lint issues
+ignore = E203, E231, E266, E501, W503
+exclude =
+  # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+  # Ensure that generated code passes flake8 lint
+  **/gapic/**
+  **/services/**
+  **/types/**
+  # Exclude Protobuf gencode
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/LICENSE b/owl-bot-staging/google-cloud-dataplex/v1/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/LICENSE
@@ -0,0 +1,202 @@
+ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below).
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in new file mode 100644 index 000000000000..dae249ec8976 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/README.rst
new file mode 100644
index 000000000000..7b2028d76ab9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Dataplex API
+=================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Dataplex API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import dataplex_v1
+
+   base_logger = logging.getLogger("google")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+   import logging
+
+   from google.cloud import dataplex_v1
+
+   base_logger = logging.getLogger("google.cloud.library_v1")
+   base_logger.addHandler(logging.StreamHandler())
+   base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
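Taken together, the points above mean that handler setup, propagation, and client instantiation order all interact. A minimal sketch of one way to combine them (illustrative only; the scope ``google.cloud.dataplex_v1`` and the handler/level choices are assumptions, not requirements):

.. code-block:: python

   import logging

   # Configure the library's logger before the first client is created,
   # so the Google-specific defaults do not take effect first.
   dataplex_logger = logging.getLogger("google.cloud.dataplex_v1")
   dataplex_logger.addHandler(logging.StreamHandler())
   dataplex_logger.setLevel(logging.DEBUG)

   # Optional: also forward events to the root logger (off by default).
   logging.getLogger("google").propagate = True

   from google.cloud import dataplex_v1  # noqa: E402

   client = dataplex_v1.CatalogServiceClient()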
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html b/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html
new file mode 100644
index 000000000000..95e9c77fcfe1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+            <div class="related top">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+        {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+            <div class="related bottom">
+              &nbsp;
+              {{- rellink_markup () }}
+            </div>
+          {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+
+{%- else %}
+{{ super() }}
+{%- endif %}
+{%- endblock %}
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py
new file mode 100644
index 000000000000..5128564a815e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py
@@ -0,0 +1,385 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-dataplex documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
+__version__ = ""
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "4.5.0"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.doctest",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+    "recommonmark",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_options = {"members": True}
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-dataplex"
+copyright = u"2025, Google, LLC"
+author = u"Google APIs"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+    "_build",
+    "**/.nox/**/*",
+    "samples/AUTHORING_GUIDE.md",
+    "samples/CONTRIBUTING.md",
+    "samples/snippets/README.rst",
+]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for google-cloud-dataplex",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-dataplex-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warning, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-dataplex.tex",
+        u"google-cloud-dataplex Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dataplex", + "google-cloud-dataplex Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dataplex", + "google-cloud-dataplex Documentation", + author, + "google-cloud-dataplex", + "google-cloud-dataplex Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst new file mode 100644 index 000000000000..278bc27fcd30 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst @@ -0,0 +1,10 @@ +BusinessGlossaryService +----------------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.business_glossary_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.business_glossary_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst new file mode 100644 index 000000000000..ef6306fadb87 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst @@ -0,0 +1,10 @@ +CatalogService +-------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.catalog_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dataplex_v1.services.catalog_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst new file mode 100644 index 000000000000..5eae398d0f87 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst @@ -0,0 +1,10 @@ +CmekService +----------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.cmek_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.cmek_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst new file mode 100644 index 000000000000..ce3774365501 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst @@ -0,0 +1,10 @@ +ContentService +-------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.content_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.content_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst new file mode 100644 index 000000000000..c9281cda5823 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst @@ -0,0 +1,10 @@ +DataScanService +--------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.data_scan_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.data_scan_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst new file mode 100644 index 000000000000..b2a185a3c43f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst @@ -0,0 +1,10 @@ +DataTaxonomyService +------------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst new file mode 100644 index 000000000000..5ecb20ccef96 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst @@ -0,0 +1,10 @@ +DataplexService +--------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.dataplex_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dataplex_v1.services.dataplex_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst
new file mode 100644
index 000000000000..d5bf19660ab5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst
@@ -0,0 +1,10 @@
+MetadataService
+---------------------------------
+
+.. automodule:: google.cloud.dataplex_v1.services.metadata_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.dataplex_v1.services.metadata_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst
new file mode 100644
index 000000000000..4f97a5efe7f1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst
@@ -0,0 +1,13 @@
+Services for Google Cloud Dataplex v1 API
+=========================================
+.. toctree::
+    :maxdepth: 2
+
+    business_glossary_service
+    catalog_service
+    cmek_service
+    content_service
+    dataplex_service
+    data_scan_service
+    data_taxonomy_service
+    metadata_service
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst
new file mode 100644
index 000000000000..391acd51ef80
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Dataplex v1 API
+======================================
+
+.. automodule:: google.cloud.dataplex_v1.types
+    :members:
+    :show-inheritance:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst
new file mode 100644
index 000000000000..03a62592ed5f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst
@@ -0,0 +1,10 @@
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    dataplex_v1/services_
+    dataplex_v1/types_
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
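A minimal sketch of the pattern the note above prescribes (not part of the generated sources): each worker builds its own `DataplexServiceClient` after the fork instead of inheriting one from the parent, so every process owns a fresh gRPC channel. It uses only the `DataplexServiceClient` and `list_lakes` surface this package exports; the project and location in `parent` are hypothetical placeholders, and Application Default Credentials are assumed.

import multiprocessing

from google.cloud import dataplex_v1


def lake_names(parent):
    # Construct the client *inside* the worker, i.e. after os.fork has run,
    # so this process gets its own gRPC channel rather than the parent's.
    client = dataplex_v1.DataplexServiceClient()
    return [lake.name for lake in client.list_lakes(parent=parent)]


if __name__ == "__main__":
    # Hypothetical project/location, purely for illustration.
    parents = ["projects/my-project/locations/us-central1"]
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(lake_names, parents))

diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py
new file mode 100644
index 000000000000..4a17145224d8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py
@@ -0,0 +1,485 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.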
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dataplex import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dataplex_v1.services.business_glossary_service.client import BusinessGlossaryServiceClient +from google.cloud.dataplex_v1.services.business_glossary_service.async_client import BusinessGlossaryServiceAsyncClient +from google.cloud.dataplex_v1.services.catalog_service.client import CatalogServiceClient +from google.cloud.dataplex_v1.services.catalog_service.async_client import CatalogServiceAsyncClient +from google.cloud.dataplex_v1.services.cmek_service.client import CmekServiceClient +from google.cloud.dataplex_v1.services.cmek_service.async_client import CmekServiceAsyncClient +from google.cloud.dataplex_v1.services.content_service.client import ContentServiceClient +from google.cloud.dataplex_v1.services.content_service.async_client import ContentServiceAsyncClient +from google.cloud.dataplex_v1.services.dataplex_service.client import DataplexServiceClient +from google.cloud.dataplex_v1.services.dataplex_service.async_client import DataplexServiceAsyncClient +from google.cloud.dataplex_v1.services.data_scan_service.client import DataScanServiceClient +from google.cloud.dataplex_v1.services.data_scan_service.async_client import DataScanServiceAsyncClient +from google.cloud.dataplex_v1.services.data_taxonomy_service.client import DataTaxonomyServiceClient +from google.cloud.dataplex_v1.services.data_taxonomy_service.async_client import DataTaxonomyServiceAsyncClient +from google.cloud.dataplex_v1.services.metadata_service.client import MetadataServiceClient +from google.cloud.dataplex_v1.services.metadata_service.async_client import MetadataServiceAsyncClient + +from google.cloud.dataplex_v1.types.analyze import Content +from google.cloud.dataplex_v1.types.analyze import Environment +from google.cloud.dataplex_v1.types.analyze import Session +from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryCategoryRequest +from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryRequest +from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryTermRequest +from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryCategoryRequest +from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryRequest +from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryTermRequest +from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryCategoryRequest +from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryRequest +from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryTermRequest +from google.cloud.dataplex_v1.types.business_glossary import Glossary +from google.cloud.dataplex_v1.types.business_glossary import GlossaryCategory +from google.cloud.dataplex_v1.types.business_glossary import GlossaryTerm +from google.cloud.dataplex_v1.types.business_glossary import ListGlossariesRequest +from google.cloud.dataplex_v1.types.business_glossary import ListGlossariesResponse +from 
google.cloud.dataplex_v1.types.business_glossary import ListGlossaryCategoriesRequest +from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryCategoriesResponse +from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryTermsRequest +from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryTermsResponse +from google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryCategoryRequest +from google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryRequest +from google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryTermRequest +from google.cloud.dataplex_v1.types.catalog import Aspect +from google.cloud.dataplex_v1.types.catalog import AspectSource +from google.cloud.dataplex_v1.types.catalog import AspectType +from google.cloud.dataplex_v1.types.catalog import CancelMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import CreateAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryLinkRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryRequest +from google.cloud.dataplex_v1.types.catalog import CreateEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import CreateMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import DeleteAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryLinkRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryRequest +from google.cloud.dataplex_v1.types.catalog import DeleteEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import Entry +from google.cloud.dataplex_v1.types.catalog import EntryGroup +from google.cloud.dataplex_v1.types.catalog import EntryLink +from google.cloud.dataplex_v1.types.catalog import EntrySource +from google.cloud.dataplex_v1.types.catalog import EntryType +from google.cloud.dataplex_v1.types.catalog import GetAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryLinkRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryRequest +from google.cloud.dataplex_v1.types.catalog import GetEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import GetMetadataJobRequest +from google.cloud.dataplex_v1.types.catalog import ImportItem +from google.cloud.dataplex_v1.types.catalog import ListAspectTypesRequest +from google.cloud.dataplex_v1.types.catalog import ListAspectTypesResponse +from google.cloud.dataplex_v1.types.catalog import ListEntriesRequest +from google.cloud.dataplex_v1.types.catalog import ListEntriesResponse +from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsRequest +from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsResponse +from google.cloud.dataplex_v1.types.catalog import ListEntryTypesRequest +from google.cloud.dataplex_v1.types.catalog import ListEntryTypesResponse +from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsRequest +from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsResponse +from google.cloud.dataplex_v1.types.catalog import LookupEntryRequest +from google.cloud.dataplex_v1.types.catalog import MetadataJob +from google.cloud.dataplex_v1.types.catalog import SearchEntriesRequest +from google.cloud.dataplex_v1.types.catalog import SearchEntriesResponse +from 
google.cloud.dataplex_v1.types.catalog import SearchEntriesResult +from google.cloud.dataplex_v1.types.catalog import UpdateAspectTypeRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryGroupRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryRequest +from google.cloud.dataplex_v1.types.catalog import UpdateEntryTypeRequest +from google.cloud.dataplex_v1.types.catalog import EntryView +from google.cloud.dataplex_v1.types.catalog import TransferStatus +from google.cloud.dataplex_v1.types.cmek import CreateEncryptionConfigRequest +from google.cloud.dataplex_v1.types.cmek import DeleteEncryptionConfigRequest +from google.cloud.dataplex_v1.types.cmek import EncryptionConfig +from google.cloud.dataplex_v1.types.cmek import GetEncryptionConfigRequest +from google.cloud.dataplex_v1.types.cmek import ListEncryptionConfigsRequest +from google.cloud.dataplex_v1.types.cmek import ListEncryptionConfigsResponse +from google.cloud.dataplex_v1.types.cmek import UpdateEncryptionConfigRequest +from google.cloud.dataplex_v1.types.content import CreateContentRequest +from google.cloud.dataplex_v1.types.content import DeleteContentRequest +from google.cloud.dataplex_v1.types.content import GetContentRequest +from google.cloud.dataplex_v1.types.content import ListContentRequest +from google.cloud.dataplex_v1.types.content import ListContentResponse +from google.cloud.dataplex_v1.types.content import UpdateContentRequest +from google.cloud.dataplex_v1.types.data_discovery import DataDiscoveryResult +from google.cloud.dataplex_v1.types.data_discovery import DataDiscoverySpec +from google.cloud.dataplex_v1.types.data_profile import DataProfileResult +from google.cloud.dataplex_v1.types.data_profile import DataProfileSpec +from google.cloud.dataplex_v1.types.data_quality import DataQualityColumnResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityDimension +from google.cloud.dataplex_v1.types.data_quality import DataQualityDimensionResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityResult +from google.cloud.dataplex_v1.types.data_quality import DataQualityRule +from google.cloud.dataplex_v1.types.data_quality import DataQualityRuleResult +from google.cloud.dataplex_v1.types.data_quality import DataQualitySpec +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataTaxonomyRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DataAttribute +from google.cloud.dataplex_v1.types.data_taxonomy import DataAttributeBinding +from google.cloud.dataplex_v1.types.data_taxonomy import DataTaxonomy +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataTaxonomyRequest +from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import GetDataTaxonomyRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsResponse +from google.cloud.dataplex_v1.types.data_taxonomy import 
ListDataAttributesRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesResponse +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesRequest +from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesResponse +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeBindingRequest +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeRequest +from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataTaxonomyRequest +from google.cloud.dataplex_v1.types.datascans import CreateDataScanRequest +from google.cloud.dataplex_v1.types.datascans import DataScan +from google.cloud.dataplex_v1.types.datascans import DataScanJob +from google.cloud.dataplex_v1.types.datascans import DeleteDataScanRequest +from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesRequest +from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesResponse +from google.cloud.dataplex_v1.types.datascans import GetDataScanJobRequest +from google.cloud.dataplex_v1.types.datascans import GetDataScanRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsResponse +from google.cloud.dataplex_v1.types.datascans import ListDataScansRequest +from google.cloud.dataplex_v1.types.datascans import ListDataScansResponse +from google.cloud.dataplex_v1.types.datascans import RunDataScanRequest +from google.cloud.dataplex_v1.types.datascans import RunDataScanResponse +from google.cloud.dataplex_v1.types.datascans import UpdateDataScanRequest +from google.cloud.dataplex_v1.types.datascans import DataScanType +from google.cloud.dataplex_v1.types.datascans_common import DataScanCatalogPublishingStatus +from google.cloud.dataplex_v1.types.logs import BusinessGlossaryEvent +from google.cloud.dataplex_v1.types.logs import DataQualityScanRuleResult +from google.cloud.dataplex_v1.types.logs import DataScanEvent +from google.cloud.dataplex_v1.types.logs import DiscoveryEvent +from google.cloud.dataplex_v1.types.logs import EntryLinkEvent +from google.cloud.dataplex_v1.types.logs import GovernanceEvent +from google.cloud.dataplex_v1.types.logs import JobEvent +from google.cloud.dataplex_v1.types.logs import SessionEvent +from google.cloud.dataplex_v1.types.metadata_ import CreateEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import CreatePartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import DeleteEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import DeletePartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import Entity +from google.cloud.dataplex_v1.types.metadata_ import GetEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import GetPartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesRequest +from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesResponse +from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsRequest +from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsResponse +from google.cloud.dataplex_v1.types.metadata_ import Partition +from google.cloud.dataplex_v1.types.metadata_ import Schema +from google.cloud.dataplex_v1.types.metadata_ import StorageAccess +from google.cloud.dataplex_v1.types.metadata_ import StorageFormat +from google.cloud.dataplex_v1.types.metadata_ import UpdateEntityRequest +from google.cloud.dataplex_v1.types.metadata_ 
import StorageSystem +from google.cloud.dataplex_v1.types.processing import DataSource +from google.cloud.dataplex_v1.types.processing import ScannedData +from google.cloud.dataplex_v1.types.processing import Trigger +from google.cloud.dataplex_v1.types.resources import Action +from google.cloud.dataplex_v1.types.resources import Asset +from google.cloud.dataplex_v1.types.resources import AssetStatus +from google.cloud.dataplex_v1.types.resources import Lake +from google.cloud.dataplex_v1.types.resources import Zone +from google.cloud.dataplex_v1.types.resources import State +from google.cloud.dataplex_v1.types.security import DataAccessSpec +from google.cloud.dataplex_v1.types.security import ResourceAccessSpec +from google.cloud.dataplex_v1.types.service import CancelJobRequest +from google.cloud.dataplex_v1.types.service import CreateAssetRequest +from google.cloud.dataplex_v1.types.service import CreateEnvironmentRequest +from google.cloud.dataplex_v1.types.service import CreateLakeRequest +from google.cloud.dataplex_v1.types.service import CreateTaskRequest +from google.cloud.dataplex_v1.types.service import CreateZoneRequest +from google.cloud.dataplex_v1.types.service import DeleteAssetRequest +from google.cloud.dataplex_v1.types.service import DeleteEnvironmentRequest +from google.cloud.dataplex_v1.types.service import DeleteLakeRequest +from google.cloud.dataplex_v1.types.service import DeleteTaskRequest +from google.cloud.dataplex_v1.types.service import DeleteZoneRequest +from google.cloud.dataplex_v1.types.service import GetAssetRequest +from google.cloud.dataplex_v1.types.service import GetEnvironmentRequest +from google.cloud.dataplex_v1.types.service import GetJobRequest +from google.cloud.dataplex_v1.types.service import GetLakeRequest +from google.cloud.dataplex_v1.types.service import GetTaskRequest +from google.cloud.dataplex_v1.types.service import GetZoneRequest +from google.cloud.dataplex_v1.types.service import ListActionsResponse +from google.cloud.dataplex_v1.types.service import ListAssetActionsRequest +from google.cloud.dataplex_v1.types.service import ListAssetsRequest +from google.cloud.dataplex_v1.types.service import ListAssetsResponse +from google.cloud.dataplex_v1.types.service import ListEnvironmentsRequest +from google.cloud.dataplex_v1.types.service import ListEnvironmentsResponse +from google.cloud.dataplex_v1.types.service import ListJobsRequest +from google.cloud.dataplex_v1.types.service import ListJobsResponse +from google.cloud.dataplex_v1.types.service import ListLakeActionsRequest +from google.cloud.dataplex_v1.types.service import ListLakesRequest +from google.cloud.dataplex_v1.types.service import ListLakesResponse +from google.cloud.dataplex_v1.types.service import ListSessionsRequest +from google.cloud.dataplex_v1.types.service import ListSessionsResponse +from google.cloud.dataplex_v1.types.service import ListTasksRequest +from google.cloud.dataplex_v1.types.service import ListTasksResponse +from google.cloud.dataplex_v1.types.service import ListZoneActionsRequest +from google.cloud.dataplex_v1.types.service import ListZonesRequest +from google.cloud.dataplex_v1.types.service import ListZonesResponse +from google.cloud.dataplex_v1.types.service import OperationMetadata +from google.cloud.dataplex_v1.types.service import RunTaskRequest +from google.cloud.dataplex_v1.types.service import RunTaskResponse +from google.cloud.dataplex_v1.types.service import UpdateAssetRequest +from google.cloud.dataplex_v1.types.service import 
UpdateEnvironmentRequest +from google.cloud.dataplex_v1.types.service import UpdateLakeRequest +from google.cloud.dataplex_v1.types.service import UpdateTaskRequest +from google.cloud.dataplex_v1.types.service import UpdateZoneRequest +from google.cloud.dataplex_v1.types.tasks import Job +from google.cloud.dataplex_v1.types.tasks import Task + +__all__ = ('BusinessGlossaryServiceClient', + 'BusinessGlossaryServiceAsyncClient', + 'CatalogServiceClient', + 'CatalogServiceAsyncClient', + 'CmekServiceClient', + 'CmekServiceAsyncClient', + 'ContentServiceClient', + 'ContentServiceAsyncClient', + 'DataplexServiceClient', + 'DataplexServiceAsyncClient', + 'DataScanServiceClient', + 'DataScanServiceAsyncClient', + 'DataTaxonomyServiceClient', + 'DataTaxonomyServiceAsyncClient', + 'MetadataServiceClient', + 'MetadataServiceAsyncClient', + 'Content', + 'Environment', + 'Session', + 'CreateGlossaryCategoryRequest', + 'CreateGlossaryRequest', + 'CreateGlossaryTermRequest', + 'DeleteGlossaryCategoryRequest', + 'DeleteGlossaryRequest', + 'DeleteGlossaryTermRequest', + 'GetGlossaryCategoryRequest', + 'GetGlossaryRequest', + 'GetGlossaryTermRequest', + 'Glossary', + 'GlossaryCategory', + 'GlossaryTerm', + 'ListGlossariesRequest', + 'ListGlossariesResponse', + 'ListGlossaryCategoriesRequest', + 'ListGlossaryCategoriesResponse', + 'ListGlossaryTermsRequest', + 'ListGlossaryTermsResponse', + 'UpdateGlossaryCategoryRequest', + 'UpdateGlossaryRequest', + 'UpdateGlossaryTermRequest', + 'Aspect', + 'AspectSource', + 'AspectType', + 'CancelMetadataJobRequest', + 'CreateAspectTypeRequest', + 'CreateEntryGroupRequest', + 'CreateEntryLinkRequest', + 'CreateEntryRequest', + 'CreateEntryTypeRequest', + 'CreateMetadataJobRequest', + 'DeleteAspectTypeRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryLinkRequest', + 'DeleteEntryRequest', + 'DeleteEntryTypeRequest', + 'Entry', + 'EntryGroup', + 'EntryLink', + 'EntrySource', + 'EntryType', + 'GetAspectTypeRequest', + 'GetEntryGroupRequest', + 'GetEntryLinkRequest', + 'GetEntryRequest', + 'GetEntryTypeRequest', + 'GetMetadataJobRequest', + 'ImportItem', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'LookupEntryRequest', + 'MetadataJob', + 'SearchEntriesRequest', + 'SearchEntriesResponse', + 'SearchEntriesResult', + 'UpdateAspectTypeRequest', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateEntryTypeRequest', + 'EntryView', + 'TransferStatus', + 'CreateEncryptionConfigRequest', + 'DeleteEncryptionConfigRequest', + 'EncryptionConfig', + 'GetEncryptionConfigRequest', + 'ListEncryptionConfigsRequest', + 'ListEncryptionConfigsResponse', + 'UpdateEncryptionConfigRequest', + 'CreateContentRequest', + 'DeleteContentRequest', + 'GetContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'UpdateContentRequest', + 'DataDiscoveryResult', + 'DataDiscoverySpec', + 'DataProfileResult', + 'DataProfileSpec', + 'DataQualityColumnResult', + 'DataQualityDimension', + 'DataQualityDimensionResult', + 'DataQualityResult', + 'DataQualityRule', + 'DataQualityRuleResult', + 'DataQualitySpec', + 'CreateDataAttributeBindingRequest', + 'CreateDataAttributeRequest', + 'CreateDataTaxonomyRequest', + 'DataAttribute', + 'DataAttributeBinding', + 'DataTaxonomy', + 'DeleteDataAttributeBindingRequest', + 'DeleteDataAttributeRequest', + 
'DeleteDataTaxonomyRequest', + 'GetDataAttributeBindingRequest', + 'GetDataAttributeRequest', + 'GetDataTaxonomyRequest', + 'ListDataAttributeBindingsRequest', + 'ListDataAttributeBindingsResponse', + 'ListDataAttributesRequest', + 'ListDataAttributesResponse', + 'ListDataTaxonomiesRequest', + 'ListDataTaxonomiesResponse', + 'UpdateDataAttributeBindingRequest', + 'UpdateDataAttributeRequest', + 'UpdateDataTaxonomyRequest', + 'CreateDataScanRequest', + 'DataScan', + 'DataScanJob', + 'DeleteDataScanRequest', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'GetDataScanJobRequest', + 'GetDataScanRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 'UpdateDataScanRequest', + 'DataScanType', + 'DataScanCatalogPublishingStatus', + 'BusinessGlossaryEvent', + 'DataQualityScanRuleResult', + 'DataScanEvent', + 'DiscoveryEvent', + 'EntryLinkEvent', + 'GovernanceEvent', + 'JobEvent', + 'SessionEvent', + 'CreateEntityRequest', + 'CreatePartitionRequest', + 'DeleteEntityRequest', + 'DeletePartitionRequest', + 'Entity', + 'GetEntityRequest', + 'GetPartitionRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'ListPartitionsRequest', + 'ListPartitionsResponse', + 'Partition', + 'Schema', + 'StorageAccess', + 'StorageFormat', + 'UpdateEntityRequest', + 'StorageSystem', + 'DataSource', + 'ScannedData', + 'Trigger', + 'Action', + 'Asset', + 'AssetStatus', + 'Lake', + 'Zone', + 'State', + 'DataAccessSpec', + 'ResourceAccessSpec', + 'CancelJobRequest', + 'CreateAssetRequest', + 'CreateEnvironmentRequest', + 'CreateLakeRequest', + 'CreateTaskRequest', + 'CreateZoneRequest', + 'DeleteAssetRequest', + 'DeleteEnvironmentRequest', + 'DeleteLakeRequest', + 'DeleteTaskRequest', + 'DeleteZoneRequest', + 'GetAssetRequest', + 'GetEnvironmentRequest', + 'GetJobRequest', + 'GetLakeRequest', + 'GetTaskRequest', + 'GetZoneRequest', + 'ListActionsResponse', + 'ListAssetActionsRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListLakeActionsRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'ListZoneActionsRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'OperationMetadata', + 'RunTaskRequest', + 'RunTaskResponse', + 'UpdateAssetRequest', + 'UpdateEnvironmentRequest', + 'UpdateLakeRequest', + 'UpdateTaskRequest', + 'UpdateZoneRequest', + 'Job', + 'Task', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed new file mode 100644 index 000000000000..c932c263028e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataplex package uses inline types. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py new file mode 100644 index 000000000000..72503aaffdf6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dataplex_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.business_glossary_service import BusinessGlossaryServiceClient +from .services.business_glossary_service import BusinessGlossaryServiceAsyncClient +from .services.catalog_service import CatalogServiceClient +from .services.catalog_service import CatalogServiceAsyncClient +from .services.cmek_service import CmekServiceClient +from .services.cmek_service import CmekServiceAsyncClient +from .services.content_service import ContentServiceClient +from .services.content_service import ContentServiceAsyncClient +from .services.dataplex_service import DataplexServiceClient +from .services.dataplex_service import DataplexServiceAsyncClient +from .services.data_scan_service import DataScanServiceClient +from .services.data_scan_service import DataScanServiceAsyncClient +from .services.data_taxonomy_service import DataTaxonomyServiceClient +from .services.data_taxonomy_service import DataTaxonomyServiceAsyncClient +from .services.metadata_service import MetadataServiceClient +from .services.metadata_service import MetadataServiceAsyncClient + +from .types.analyze import Content +from .types.analyze import Environment +from .types.analyze import Session +from .types.business_glossary import CreateGlossaryCategoryRequest +from .types.business_glossary import CreateGlossaryRequest +from .types.business_glossary import CreateGlossaryTermRequest +from .types.business_glossary import DeleteGlossaryCategoryRequest +from .types.business_glossary import DeleteGlossaryRequest +from .types.business_glossary import DeleteGlossaryTermRequest +from .types.business_glossary import GetGlossaryCategoryRequest +from .types.business_glossary import GetGlossaryRequest +from .types.business_glossary import GetGlossaryTermRequest +from .types.business_glossary import Glossary +from .types.business_glossary import GlossaryCategory +from .types.business_glossary import GlossaryTerm +from 
.types.business_glossary import ListGlossariesRequest +from .types.business_glossary import ListGlossariesResponse +from .types.business_glossary import ListGlossaryCategoriesRequest +from .types.business_glossary import ListGlossaryCategoriesResponse +from .types.business_glossary import ListGlossaryTermsRequest +from .types.business_glossary import ListGlossaryTermsResponse +from .types.business_glossary import UpdateGlossaryCategoryRequest +from .types.business_glossary import UpdateGlossaryRequest +from .types.business_glossary import UpdateGlossaryTermRequest +from .types.catalog import Aspect +from .types.catalog import AspectSource +from .types.catalog import AspectType +from .types.catalog import CancelMetadataJobRequest +from .types.catalog import CreateAspectTypeRequest +from .types.catalog import CreateEntryGroupRequest +from .types.catalog import CreateEntryLinkRequest +from .types.catalog import CreateEntryRequest +from .types.catalog import CreateEntryTypeRequest +from .types.catalog import CreateMetadataJobRequest +from .types.catalog import DeleteAspectTypeRequest +from .types.catalog import DeleteEntryGroupRequest +from .types.catalog import DeleteEntryLinkRequest +from .types.catalog import DeleteEntryRequest +from .types.catalog import DeleteEntryTypeRequest +from .types.catalog import Entry +from .types.catalog import EntryGroup +from .types.catalog import EntryLink +from .types.catalog import EntrySource +from .types.catalog import EntryType +from .types.catalog import GetAspectTypeRequest +from .types.catalog import GetEntryGroupRequest +from .types.catalog import GetEntryLinkRequest +from .types.catalog import GetEntryRequest +from .types.catalog import GetEntryTypeRequest +from .types.catalog import GetMetadataJobRequest +from .types.catalog import ImportItem +from .types.catalog import ListAspectTypesRequest +from .types.catalog import ListAspectTypesResponse +from .types.catalog import ListEntriesRequest +from .types.catalog import ListEntriesResponse +from .types.catalog import ListEntryGroupsRequest +from .types.catalog import ListEntryGroupsResponse +from .types.catalog import ListEntryTypesRequest +from .types.catalog import ListEntryTypesResponse +from .types.catalog import ListMetadataJobsRequest +from .types.catalog import ListMetadataJobsResponse +from .types.catalog import LookupEntryRequest +from .types.catalog import MetadataJob +from .types.catalog import SearchEntriesRequest +from .types.catalog import SearchEntriesResponse +from .types.catalog import SearchEntriesResult +from .types.catalog import UpdateAspectTypeRequest +from .types.catalog import UpdateEntryGroupRequest +from .types.catalog import UpdateEntryRequest +from .types.catalog import UpdateEntryTypeRequest +from .types.catalog import EntryView +from .types.catalog import TransferStatus +from .types.cmek import CreateEncryptionConfigRequest +from .types.cmek import DeleteEncryptionConfigRequest +from .types.cmek import EncryptionConfig +from .types.cmek import GetEncryptionConfigRequest +from .types.cmek import ListEncryptionConfigsRequest +from .types.cmek import ListEncryptionConfigsResponse +from .types.cmek import UpdateEncryptionConfigRequest +from .types.content import CreateContentRequest +from .types.content import DeleteContentRequest +from .types.content import GetContentRequest +from .types.content import ListContentRequest +from .types.content import ListContentResponse +from .types.content import UpdateContentRequest +from .types.data_discovery import DataDiscoveryResult +from 
.types.data_discovery import DataDiscoverySpec +from .types.data_profile import DataProfileResult +from .types.data_profile import DataProfileSpec +from .types.data_quality import DataQualityColumnResult +from .types.data_quality import DataQualityDimension +from .types.data_quality import DataQualityDimensionResult +from .types.data_quality import DataQualityResult +from .types.data_quality import DataQualityRule +from .types.data_quality import DataQualityRuleResult +from .types.data_quality import DataQualitySpec +from .types.data_taxonomy import CreateDataAttributeBindingRequest +from .types.data_taxonomy import CreateDataAttributeRequest +from .types.data_taxonomy import CreateDataTaxonomyRequest +from .types.data_taxonomy import DataAttribute +from .types.data_taxonomy import DataAttributeBinding +from .types.data_taxonomy import DataTaxonomy +from .types.data_taxonomy import DeleteDataAttributeBindingRequest +from .types.data_taxonomy import DeleteDataAttributeRequest +from .types.data_taxonomy import DeleteDataTaxonomyRequest +from .types.data_taxonomy import GetDataAttributeBindingRequest +from .types.data_taxonomy import GetDataAttributeRequest +from .types.data_taxonomy import GetDataTaxonomyRequest +from .types.data_taxonomy import ListDataAttributeBindingsRequest +from .types.data_taxonomy import ListDataAttributeBindingsResponse +from .types.data_taxonomy import ListDataAttributesRequest +from .types.data_taxonomy import ListDataAttributesResponse +from .types.data_taxonomy import ListDataTaxonomiesRequest +from .types.data_taxonomy import ListDataTaxonomiesResponse +from .types.data_taxonomy import UpdateDataAttributeBindingRequest +from .types.data_taxonomy import UpdateDataAttributeRequest +from .types.data_taxonomy import UpdateDataTaxonomyRequest +from .types.datascans import CreateDataScanRequest +from .types.datascans import DataScan +from .types.datascans import DataScanJob +from .types.datascans import DeleteDataScanRequest +from .types.datascans import GenerateDataQualityRulesRequest +from .types.datascans import GenerateDataQualityRulesResponse +from .types.datascans import GetDataScanJobRequest +from .types.datascans import GetDataScanRequest +from .types.datascans import ListDataScanJobsRequest +from .types.datascans import ListDataScanJobsResponse +from .types.datascans import ListDataScansRequest +from .types.datascans import ListDataScansResponse +from .types.datascans import RunDataScanRequest +from .types.datascans import RunDataScanResponse +from .types.datascans import UpdateDataScanRequest +from .types.datascans import DataScanType +from .types.datascans_common import DataScanCatalogPublishingStatus +from .types.logs import BusinessGlossaryEvent +from .types.logs import DataQualityScanRuleResult +from .types.logs import DataScanEvent +from .types.logs import DiscoveryEvent +from .types.logs import EntryLinkEvent +from .types.logs import GovernanceEvent +from .types.logs import JobEvent +from .types.logs import SessionEvent +from .types.metadata_ import CreateEntityRequest +from .types.metadata_ import CreatePartitionRequest +from .types.metadata_ import DeleteEntityRequest +from .types.metadata_ import DeletePartitionRequest +from .types.metadata_ import Entity +from .types.metadata_ import GetEntityRequest +from .types.metadata_ import GetPartitionRequest +from .types.metadata_ import ListEntitiesRequest +from .types.metadata_ import ListEntitiesResponse +from .types.metadata_ import ListPartitionsRequest +from .types.metadata_ import 
ListPartitionsResponse +from .types.metadata_ import Partition +from .types.metadata_ import Schema +from .types.metadata_ import StorageAccess +from .types.metadata_ import StorageFormat +from .types.metadata_ import UpdateEntityRequest +from .types.metadata_ import StorageSystem +from .types.processing import DataSource +from .types.processing import ScannedData +from .types.processing import Trigger +from .types.resources import Action +from .types.resources import Asset +from .types.resources import AssetStatus +from .types.resources import Lake +from .types.resources import Zone +from .types.resources import State +from .types.security import DataAccessSpec +from .types.security import ResourceAccessSpec +from .types.service import CancelJobRequest +from .types.service import CreateAssetRequest +from .types.service import CreateEnvironmentRequest +from .types.service import CreateLakeRequest +from .types.service import CreateTaskRequest +from .types.service import CreateZoneRequest +from .types.service import DeleteAssetRequest +from .types.service import DeleteEnvironmentRequest +from .types.service import DeleteLakeRequest +from .types.service import DeleteTaskRequest +from .types.service import DeleteZoneRequest +from .types.service import GetAssetRequest +from .types.service import GetEnvironmentRequest +from .types.service import GetJobRequest +from .types.service import GetLakeRequest +from .types.service import GetTaskRequest +from .types.service import GetZoneRequest +from .types.service import ListActionsResponse +from .types.service import ListAssetActionsRequest +from .types.service import ListAssetsRequest +from .types.service import ListAssetsResponse +from .types.service import ListEnvironmentsRequest +from .types.service import ListEnvironmentsResponse +from .types.service import ListJobsRequest +from .types.service import ListJobsResponse +from .types.service import ListLakeActionsRequest +from .types.service import ListLakesRequest +from .types.service import ListLakesResponse +from .types.service import ListSessionsRequest +from .types.service import ListSessionsResponse +from .types.service import ListTasksRequest +from .types.service import ListTasksResponse +from .types.service import ListZoneActionsRequest +from .types.service import ListZonesRequest +from .types.service import ListZonesResponse +from .types.service import OperationMetadata +from .types.service import RunTaskRequest +from .types.service import RunTaskResponse +from .types.service import UpdateAssetRequest +from .types.service import UpdateEnvironmentRequest +from .types.service import UpdateLakeRequest +from .types.service import UpdateTaskRequest +from .types.service import UpdateZoneRequest +from .types.tasks import Job +from .types.tasks import Task + +__all__ = ( + 'BusinessGlossaryServiceAsyncClient', + 'CatalogServiceAsyncClient', + 'CmekServiceAsyncClient', + 'ContentServiceAsyncClient', + 'DataScanServiceAsyncClient', + 'DataTaxonomyServiceAsyncClient', + 'DataplexServiceAsyncClient', + 'MetadataServiceAsyncClient', +'Action', +'Aspect', +'AspectSource', +'AspectType', +'Asset', +'AssetStatus', +'BusinessGlossaryEvent', +'BusinessGlossaryServiceClient', +'CancelJobRequest', +'CancelMetadataJobRequest', +'CatalogServiceClient', +'CmekServiceClient', +'Content', +'ContentServiceClient', +'CreateAspectTypeRequest', +'CreateAssetRequest', +'CreateContentRequest', +'CreateDataAttributeBindingRequest', +'CreateDataAttributeRequest', +'CreateDataScanRequest', +'CreateDataTaxonomyRequest', 
+'CreateEncryptionConfigRequest', +'CreateEntityRequest', +'CreateEntryGroupRequest', +'CreateEntryLinkRequest', +'CreateEntryRequest', +'CreateEntryTypeRequest', +'CreateEnvironmentRequest', +'CreateGlossaryCategoryRequest', +'CreateGlossaryRequest', +'CreateGlossaryTermRequest', +'CreateLakeRequest', +'CreateMetadataJobRequest', +'CreatePartitionRequest', +'CreateTaskRequest', +'CreateZoneRequest', +'DataAccessSpec', +'DataAttribute', +'DataAttributeBinding', +'DataDiscoveryResult', +'DataDiscoverySpec', +'DataProfileResult', +'DataProfileSpec', +'DataQualityColumnResult', +'DataQualityDimension', +'DataQualityDimensionResult', +'DataQualityResult', +'DataQualityRule', +'DataQualityRuleResult', +'DataQualityScanRuleResult', +'DataQualitySpec', +'DataScan', +'DataScanCatalogPublishingStatus', +'DataScanEvent', +'DataScanJob', +'DataScanServiceClient', +'DataScanType', +'DataSource', +'DataTaxonomy', +'DataTaxonomyServiceClient', +'DataplexServiceClient', +'DeleteAspectTypeRequest', +'DeleteAssetRequest', +'DeleteContentRequest', +'DeleteDataAttributeBindingRequest', +'DeleteDataAttributeRequest', +'DeleteDataScanRequest', +'DeleteDataTaxonomyRequest', +'DeleteEncryptionConfigRequest', +'DeleteEntityRequest', +'DeleteEntryGroupRequest', +'DeleteEntryLinkRequest', +'DeleteEntryRequest', +'DeleteEntryTypeRequest', +'DeleteEnvironmentRequest', +'DeleteGlossaryCategoryRequest', +'DeleteGlossaryRequest', +'DeleteGlossaryTermRequest', +'DeleteLakeRequest', +'DeletePartitionRequest', +'DeleteTaskRequest', +'DeleteZoneRequest', +'DiscoveryEvent', +'EncryptionConfig', +'Entity', +'Entry', +'EntryGroup', +'EntryLink', +'EntryLinkEvent', +'EntrySource', +'EntryType', +'EntryView', +'Environment', +'GenerateDataQualityRulesRequest', +'GenerateDataQualityRulesResponse', +'GetAspectTypeRequest', +'GetAssetRequest', +'GetContentRequest', +'GetDataAttributeBindingRequest', +'GetDataAttributeRequest', +'GetDataScanJobRequest', +'GetDataScanRequest', +'GetDataTaxonomyRequest', +'GetEncryptionConfigRequest', +'GetEntityRequest', +'GetEntryGroupRequest', +'GetEntryLinkRequest', +'GetEntryRequest', +'GetEntryTypeRequest', +'GetEnvironmentRequest', +'GetGlossaryCategoryRequest', +'GetGlossaryRequest', +'GetGlossaryTermRequest', +'GetJobRequest', +'GetLakeRequest', +'GetMetadataJobRequest', +'GetPartitionRequest', +'GetTaskRequest', +'GetZoneRequest', +'Glossary', +'GlossaryCategory', +'GlossaryTerm', +'GovernanceEvent', +'ImportItem', +'Job', +'JobEvent', +'Lake', +'ListActionsResponse', +'ListAspectTypesRequest', +'ListAspectTypesResponse', +'ListAssetActionsRequest', +'ListAssetsRequest', +'ListAssetsResponse', +'ListContentRequest', +'ListContentResponse', +'ListDataAttributeBindingsRequest', +'ListDataAttributeBindingsResponse', +'ListDataAttributesRequest', +'ListDataAttributesResponse', +'ListDataScanJobsRequest', +'ListDataScanJobsResponse', +'ListDataScansRequest', +'ListDataScansResponse', +'ListDataTaxonomiesRequest', +'ListDataTaxonomiesResponse', +'ListEncryptionConfigsRequest', +'ListEncryptionConfigsResponse', +'ListEntitiesRequest', +'ListEntitiesResponse', +'ListEntriesRequest', +'ListEntriesResponse', +'ListEntryGroupsRequest', +'ListEntryGroupsResponse', +'ListEntryTypesRequest', +'ListEntryTypesResponse', +'ListEnvironmentsRequest', +'ListEnvironmentsResponse', +'ListGlossariesRequest', +'ListGlossariesResponse', +'ListGlossaryCategoriesRequest', +'ListGlossaryCategoriesResponse', +'ListGlossaryTermsRequest', +'ListGlossaryTermsResponse', +'ListJobsRequest', +'ListJobsResponse', 
+'ListLakeActionsRequest', +'ListLakesRequest', +'ListLakesResponse', +'ListMetadataJobsRequest', +'ListMetadataJobsResponse', +'ListPartitionsRequest', +'ListPartitionsResponse', +'ListSessionsRequest', +'ListSessionsResponse', +'ListTasksRequest', +'ListTasksResponse', +'ListZoneActionsRequest', +'ListZonesRequest', +'ListZonesResponse', +'LookupEntryRequest', +'MetadataJob', +'MetadataServiceClient', +'OperationMetadata', +'Partition', +'ResourceAccessSpec', +'RunDataScanRequest', +'RunDataScanResponse', +'RunTaskRequest', +'RunTaskResponse', +'ScannedData', +'Schema', +'SearchEntriesRequest', +'SearchEntriesResponse', +'SearchEntriesResult', +'Session', +'SessionEvent', +'State', +'StorageAccess', +'StorageFormat', +'StorageSystem', +'Task', +'TransferStatus', +'Trigger', +'UpdateAspectTypeRequest', +'UpdateAssetRequest', +'UpdateContentRequest', +'UpdateDataAttributeBindingRequest', +'UpdateDataAttributeRequest', +'UpdateDataScanRequest', +'UpdateDataTaxonomyRequest', +'UpdateEncryptionConfigRequest', +'UpdateEntityRequest', +'UpdateEntryGroupRequest', +'UpdateEntryRequest', +'UpdateEntryTypeRequest', +'UpdateEnvironmentRequest', +'UpdateGlossaryCategoryRequest', +'UpdateGlossaryRequest', +'UpdateGlossaryTermRequest', +'UpdateLakeRequest', +'UpdateTaskRequest', +'UpdateZoneRequest', +'Zone', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json new file mode 100644 index 000000000000..dd1090ea5c40 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json @@ -0,0 +1,2006 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataplex_v1", + "protoPackage": "google.cloud.dataplex.v1", + "schema": "1.0", + "services": { + "BusinessGlossaryService": { + "clients": { + "grpc": { + "libraryClient": "BusinessGlossaryServiceClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BusinessGlossaryServiceAsyncClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + 
"DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + }, + "rest": { + "libraryClient": "BusinessGlossaryServiceClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + } + } + }, + "CatalogService": { + "clients": { + "grpc": { + "libraryClient": "CatalogServiceClient", + "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, + "CreateAspectType": { + "methods": [ + "create_aspect_type" + ] + }, + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, + "CreateEntryType": { + "methods": [ + "create_entry_type" + ] + }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, + "DeleteAspectType": { + "methods": [ + "delete_aspect_type" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, + "DeleteEntryType": { + "methods": [ + "delete_entry_type" + ] + }, + "GetAspectType": { + "methods": [ + "get_aspect_type" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, + "GetEntryType": { + "methods": [ + "get_entry_type" + ] + }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, + "ListAspectTypes": { + "methods": [ + "list_aspect_types" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListEntryTypes": { + "methods": [ + 
"list_entry_types" + ] + }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "SearchEntries": { + "methods": [ + "search_entries" + ] + }, + "UpdateAspectType": { + "methods": [ + "update_aspect_type" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateEntryType": { + "methods": [ + "update_entry_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CatalogServiceAsyncClient", + "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, + "CreateAspectType": { + "methods": [ + "create_aspect_type" + ] + }, + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, + "CreateEntryType": { + "methods": [ + "create_entry_type" + ] + }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, + "DeleteAspectType": { + "methods": [ + "delete_aspect_type" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, + "DeleteEntryType": { + "methods": [ + "delete_entry_type" + ] + }, + "GetAspectType": { + "methods": [ + "get_aspect_type" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, + "GetEntryType": { + "methods": [ + "get_entry_type" + ] + }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, + "ListAspectTypes": { + "methods": [ + "list_aspect_types" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListEntryTypes": { + "methods": [ + "list_entry_types" + ] + }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "SearchEntries": { + "methods": [ + "search_entries" + ] + }, + "UpdateAspectType": { + "methods": [ + "update_aspect_type" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateEntryType": { + "methods": [ + "update_entry_type" + ] + } + } + }, + "rest": { + "libraryClient": "CatalogServiceClient", + "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, + "CreateAspectType": { + "methods": [ + "create_aspect_type" + ] + }, + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, + "CreateEntryType": { + "methods": [ + "create_entry_type" + ] + }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, + "DeleteAspectType": { + "methods": [ + "delete_aspect_type" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, + "DeleteEntryType": { + "methods": [ + "delete_entry_type" + ] + }, + "GetAspectType": { + "methods": [ + "get_aspect_type" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { 
+ "methods": [ + "get_entry_group" + ] + }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, + "GetEntryType": { + "methods": [ + "get_entry_type" + ] + }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, + "ListAspectTypes": { + "methods": [ + "list_aspect_types" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListEntryTypes": { + "methods": [ + "list_entry_types" + ] + }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "SearchEntries": { + "methods": [ + "search_entries" + ] + }, + "UpdateAspectType": { + "methods": [ + "update_aspect_type" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateEntryType": { + "methods": [ + "update_entry_type" + ] + } + } + } + } + }, + "CmekService": { + "clients": { + "grpc": { + "libraryClient": "CmekServiceClient", + "rpcs": { + "CreateEncryptionConfig": { + "methods": [ + "create_encryption_config" + ] + }, + "DeleteEncryptionConfig": { + "methods": [ + "delete_encryption_config" + ] + }, + "GetEncryptionConfig": { + "methods": [ + "get_encryption_config" + ] + }, + "ListEncryptionConfigs": { + "methods": [ + "list_encryption_configs" + ] + }, + "UpdateEncryptionConfig": { + "methods": [ + "update_encryption_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CmekServiceAsyncClient", + "rpcs": { + "CreateEncryptionConfig": { + "methods": [ + "create_encryption_config" + ] + }, + "DeleteEncryptionConfig": { + "methods": [ + "delete_encryption_config" + ] + }, + "GetEncryptionConfig": { + "methods": [ + "get_encryption_config" + ] + }, + "ListEncryptionConfigs": { + "methods": [ + "list_encryption_configs" + ] + }, + "UpdateEncryptionConfig": { + "methods": [ + "update_encryption_config" + ] + } + } + }, + "rest": { + "libraryClient": "CmekServiceClient", + "rpcs": { + "CreateEncryptionConfig": { + "methods": [ + "create_encryption_config" + ] + }, + "DeleteEncryptionConfig": { + "methods": [ + "delete_encryption_config" + ] + }, + "GetEncryptionConfig": { + "methods": [ + "get_encryption_config" + ] + }, + "ListEncryptionConfigs": { + "methods": [ + "list_encryption_configs" + ] + }, + "UpdateEncryptionConfig": { + "methods": [ + "update_encryption_config" + ] + } + } + } + } + }, + "ContentService": { + "clients": { + "grpc": { + "libraryClient": "ContentServiceClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ContentServiceAsyncClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + 
"TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + }, + "rest": { + "libraryClient": "ContentServiceClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + } + } + }, + "DataScanService": { + "clients": { + "grpc": { + "libraryClient": "DataScanServiceClient", + "rpcs": { + "CreateDataScan": { + "methods": [ + "create_data_scan" + ] + }, + "DeleteDataScan": { + "methods": [ + "delete_data_scan" + ] + }, + "GenerateDataQualityRules": { + "methods": [ + "generate_data_quality_rules" + ] + }, + "GetDataScan": { + "methods": [ + "get_data_scan" + ] + }, + "GetDataScanJob": { + "methods": [ + "get_data_scan_job" + ] + }, + "ListDataScanJobs": { + "methods": [ + "list_data_scan_jobs" + ] + }, + "ListDataScans": { + "methods": [ + "list_data_scans" + ] + }, + "RunDataScan": { + "methods": [ + "run_data_scan" + ] + }, + "UpdateDataScan": { + "methods": [ + "update_data_scan" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataScanServiceAsyncClient", + "rpcs": { + "CreateDataScan": { + "methods": [ + "create_data_scan" + ] + }, + "DeleteDataScan": { + "methods": [ + "delete_data_scan" + ] + }, + "GenerateDataQualityRules": { + "methods": [ + "generate_data_quality_rules" + ] + }, + "GetDataScan": { + "methods": [ + "get_data_scan" + ] + }, + "GetDataScanJob": { + "methods": [ + "get_data_scan_job" + ] + }, + "ListDataScanJobs": { + "methods": [ + "list_data_scan_jobs" + ] + }, + "ListDataScans": { + "methods": [ + "list_data_scans" + ] + }, + "RunDataScan": { + "methods": [ + "run_data_scan" + ] + }, + "UpdateDataScan": { + "methods": [ + "update_data_scan" + ] + } + } + }, + "rest": { + "libraryClient": "DataScanServiceClient", + "rpcs": { + "CreateDataScan": { + "methods": [ + "create_data_scan" + ] + }, + "DeleteDataScan": { + "methods": [ + "delete_data_scan" + ] + }, + "GenerateDataQualityRules": { + "methods": [ + "generate_data_quality_rules" + ] + }, + "GetDataScan": { + "methods": [ + "get_data_scan" + ] + }, + "GetDataScanJob": { + "methods": [ + "get_data_scan_job" + ] + }, + "ListDataScanJobs": { + "methods": [ + "list_data_scan_jobs" + ] + }, + "ListDataScans": { + "methods": [ + "list_data_scans" + ] + }, + "RunDataScan": { + "methods": [ + "run_data_scan" + ] + }, + "UpdateDataScan": { + "methods": [ + "update_data_scan" + ] + } + } + } + } + }, + "DataTaxonomyService": { + "clients": { + "grpc": { + "libraryClient": "DataTaxonomyServiceClient", + "rpcs": { + "CreateDataAttribute": { + "methods": [ + "create_data_attribute" + ] + }, + "CreateDataAttributeBinding": { + "methods": [ + "create_data_attribute_binding" + ] + }, + "CreateDataTaxonomy": { + "methods": [ + "create_data_taxonomy" + ] + }, + "DeleteDataAttribute": { + "methods": [ + "delete_data_attribute" + ] + }, + "DeleteDataAttributeBinding": { + "methods": [ + "delete_data_attribute_binding" + ] + }, + "DeleteDataTaxonomy": { + "methods": [ + "delete_data_taxonomy" + ] + }, + "GetDataAttribute": { + "methods": [ + "get_data_attribute" + ] + }, + 
"GetDataAttributeBinding": { + "methods": [ + "get_data_attribute_binding" + ] + }, + "GetDataTaxonomy": { + "methods": [ + "get_data_taxonomy" + ] + }, + "ListDataAttributeBindings": { + "methods": [ + "list_data_attribute_bindings" + ] + }, + "ListDataAttributes": { + "methods": [ + "list_data_attributes" + ] + }, + "ListDataTaxonomies": { + "methods": [ + "list_data_taxonomies" + ] + }, + "UpdateDataAttribute": { + "methods": [ + "update_data_attribute" + ] + }, + "UpdateDataAttributeBinding": { + "methods": [ + "update_data_attribute_binding" + ] + }, + "UpdateDataTaxonomy": { + "methods": [ + "update_data_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataTaxonomyServiceAsyncClient", + "rpcs": { + "CreateDataAttribute": { + "methods": [ + "create_data_attribute" + ] + }, + "CreateDataAttributeBinding": { + "methods": [ + "create_data_attribute_binding" + ] + }, + "CreateDataTaxonomy": { + "methods": [ + "create_data_taxonomy" + ] + }, + "DeleteDataAttribute": { + "methods": [ + "delete_data_attribute" + ] + }, + "DeleteDataAttributeBinding": { + "methods": [ + "delete_data_attribute_binding" + ] + }, + "DeleteDataTaxonomy": { + "methods": [ + "delete_data_taxonomy" + ] + }, + "GetDataAttribute": { + "methods": [ + "get_data_attribute" + ] + }, + "GetDataAttributeBinding": { + "methods": [ + "get_data_attribute_binding" + ] + }, + "GetDataTaxonomy": { + "methods": [ + "get_data_taxonomy" + ] + }, + "ListDataAttributeBindings": { + "methods": [ + "list_data_attribute_bindings" + ] + }, + "ListDataAttributes": { + "methods": [ + "list_data_attributes" + ] + }, + "ListDataTaxonomies": { + "methods": [ + "list_data_taxonomies" + ] + }, + "UpdateDataAttribute": { + "methods": [ + "update_data_attribute" + ] + }, + "UpdateDataAttributeBinding": { + "methods": [ + "update_data_attribute_binding" + ] + }, + "UpdateDataTaxonomy": { + "methods": [ + "update_data_taxonomy" + ] + } + } + }, + "rest": { + "libraryClient": "DataTaxonomyServiceClient", + "rpcs": { + "CreateDataAttribute": { + "methods": [ + "create_data_attribute" + ] + }, + "CreateDataAttributeBinding": { + "methods": [ + "create_data_attribute_binding" + ] + }, + "CreateDataTaxonomy": { + "methods": [ + "create_data_taxonomy" + ] + }, + "DeleteDataAttribute": { + "methods": [ + "delete_data_attribute" + ] + }, + "DeleteDataAttributeBinding": { + "methods": [ + "delete_data_attribute_binding" + ] + }, + "DeleteDataTaxonomy": { + "methods": [ + "delete_data_taxonomy" + ] + }, + "GetDataAttribute": { + "methods": [ + "get_data_attribute" + ] + }, + "GetDataAttributeBinding": { + "methods": [ + "get_data_attribute_binding" + ] + }, + "GetDataTaxonomy": { + "methods": [ + "get_data_taxonomy" + ] + }, + "ListDataAttributeBindings": { + "methods": [ + "list_data_attribute_bindings" + ] + }, + "ListDataAttributes": { + "methods": [ + "list_data_attributes" + ] + }, + "ListDataTaxonomies": { + "methods": [ + "list_data_taxonomies" + ] + }, + "UpdateDataAttribute": { + "methods": [ + "update_data_attribute" + ] + }, + "UpdateDataAttributeBinding": { + "methods": [ + "update_data_attribute_binding" + ] + }, + "UpdateDataTaxonomy": { + "methods": [ + "update_data_taxonomy" + ] + } + } + } + } + }, + "DataplexService": { + "clients": { + "grpc": { + "libraryClient": "DataplexServiceClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateAsset": { + "methods": [ + "create_asset" + ] + }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateLake": { + "methods": [ + 
"create_lake" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteAsset": { + "methods": [ + "delete_asset" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteLake": { + "methods": [ + "delete_lake" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetAsset": { + "methods": [ + "get_asset" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetLake": { + "methods": [ + "get_lake" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListAssetActions": { + "methods": [ + "list_asset_actions" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "ListLakeActions": { + "methods": [ + "list_lake_actions" + ] + }, + "ListLakes": { + "methods": [ + "list_lakes" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "ListZoneActions": { + "methods": [ + "list_zone_actions" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "UpdateAsset": { + "methods": [ + "update_asset" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, + "UpdateLake": { + "methods": [ + "update_lake" + ] + }, + "UpdateTask": { + "methods": [ + "update_task" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataplexServiceAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateAsset": { + "methods": [ + "create_asset" + ] + }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateLake": { + "methods": [ + "create_lake" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteAsset": { + "methods": [ + "delete_asset" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteLake": { + "methods": [ + "delete_lake" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetAsset": { + "methods": [ + "get_asset" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetLake": { + "methods": [ + "get_lake" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListAssetActions": { + "methods": [ + "list_asset_actions" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "ListLakeActions": { + "methods": [ + "list_lake_actions" + ] + }, + "ListLakes": { + "methods": [ + "list_lakes" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "ListZoneActions": { + "methods": [ + "list_zone_actions" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "UpdateAsset": { + "methods": [ + "update_asset" 
+ ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, + "UpdateLake": { + "methods": [ + "update_lake" + ] + }, + "UpdateTask": { + "methods": [ + "update_task" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + }, + "rest": { + "libraryClient": "DataplexServiceClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "CreateAsset": { + "methods": [ + "create_asset" + ] + }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateLake": { + "methods": [ + "create_lake" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "CreateZone": { + "methods": [ + "create_zone" + ] + }, + "DeleteAsset": { + "methods": [ + "delete_asset" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteLake": { + "methods": [ + "delete_lake" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "DeleteZone": { + "methods": [ + "delete_zone" + ] + }, + "GetAsset": { + "methods": [ + "get_asset" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetLake": { + "methods": [ + "get_lake" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "GetZone": { + "methods": [ + "get_zone" + ] + }, + "ListAssetActions": { + "methods": [ + "list_asset_actions" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "ListLakeActions": { + "methods": [ + "list_lake_actions" + ] + }, + "ListLakes": { + "methods": [ + "list_lakes" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "ListZoneActions": { + "methods": [ + "list_zone_actions" + ] + }, + "ListZones": { + "methods": [ + "list_zones" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "UpdateAsset": { + "methods": [ + "update_asset" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, + "UpdateLake": { + "methods": [ + "update_lake" + ] + }, + "UpdateTask": { + "methods": [ + "update_task" + ] + }, + "UpdateZone": { + "methods": [ + "update_zone" + ] + } + } + } + } + }, + "MetadataService": { + "clients": { + "grpc": { + "libraryClient": "MetadataServiceClient", + "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, + "GetEntity": { + "methods": [ + "get_entity" + ] + }, + "GetPartition": { + "methods": [ + "get_partition" + ] + }, + "ListEntities": { + "methods": [ + "list_entities" + ] + }, + "ListPartitions": { + "methods": [ + "list_partitions" + ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetadataServiceAsyncClient", + "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, + "GetEntity": { + "methods": [ + "get_entity" + ] + }, + "GetPartition": { + "methods": [ + "get_partition" + ] + }, + "ListEntities": { + "methods": [ + "list_entities" + ] + }, + "ListPartitions": { + 
"methods": [ + "list_partitions" + ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] + } + } + }, + "rest": { + "libraryClient": "MetadataServiceClient", + "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, + "GetEntity": { + "methods": [ + "get_entity" + ] + }, + "GetPartition": { + "methods": [ + "get_partition" + ] + }, + "ListEntities": { + "methods": [ + "list_entities" + ] + }, + "ListPartitions": { + "methods": [ + "list_partitions" + ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py new file mode 100644 index 000000000000..20a9cd975b02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed new file mode 100644 index 000000000000..c932c263028e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataplex package uses inline types. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py new file mode 100644 index 000000000000..cbf94b283c70 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py new file mode 100644 index 000000000000..c9e791266c6a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import BusinessGlossaryServiceClient +from .async_client import BusinessGlossaryServiceAsyncClient + +__all__ = ( + 'BusinessGlossaryServiceClient', + 'BusinessGlossaryServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py new file mode 100644 index 000000000000..576a6fd46a8f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py @@ -0,0 +1,2395 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.types import business_glossary +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport +from .client import BusinessGlossaryServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class BusinessGlossaryServiceAsyncClient: + """BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + """ + + _client: BusinessGlossaryServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + + glossary_path = staticmethod(BusinessGlossaryServiceClient.glossary_path) + parse_glossary_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_path) + glossary_category_path = staticmethod(BusinessGlossaryServiceClient.glossary_category_path) + parse_glossary_category_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_category_path) + glossary_term_path = staticmethod(BusinessGlossaryServiceClient.glossary_term_path) + parse_glossary_term_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_term_path) + common_billing_account_path = staticmethod(BusinessGlossaryServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(BusinessGlossaryServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(BusinessGlossaryServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(BusinessGlossaryServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(BusinessGlossaryServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(BusinessGlossaryServiceClient.parse_common_organization_path) + common_project_path = staticmethod(BusinessGlossaryServiceClient.common_project_path) + parse_common_project_path = staticmethod(BusinessGlossaryServiceClient.parse_common_project_path) + common_location_path = staticmethod(BusinessGlossaryServiceClient.common_location_path) + parse_common_location_path = staticmethod(BusinessGlossaryServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceAsyncClient: The constructed client. + """ + return BusinessGlossaryServiceClient.from_service_account_info.__func__(BusinessGlossaryServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceAsyncClient: The constructed client. + """ + return BusinessGlossaryServiceClient.from_service_account_file.__func__(BusinessGlossaryServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BusinessGlossaryServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BusinessGlossaryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessGlossaryServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BusinessGlossaryServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BusinessGlossaryServiceTransport, Callable[..., BusinessGlossaryServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business glossary service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessGlossaryServiceTransport,Callable[..., BusinessGlossaryServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessGlossaryServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided.
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which has one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BusinessGlossaryServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "credentialsType": None, + } + ) + + async def create_glossary(self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryRequest, dict]]): + The request object. Create Glossary Request + parent (:class:`str`): + Required. The parent resource where this Glossary will + be created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary (:class:`google.cloud.dataplex_v1.types.Glossary`): + Required. The Glossary to create. + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_id (:class:`str`): + Required. Glossary ID: Glossary + identifier. + + This corresponds to the ``glossary_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, glossary, glossary_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryRequest): + request = business_glossary.CreateGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if glossary is not None: + request.glossary = glossary + if glossary_id is not None: + request.glossary_id = glossary_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_glossary(self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryRequest, dict]]): + The request object. Update Glossary Request + glossary (:class:`google.cloud.dataplex_v1.types.Glossary`): + Required. The Glossary to update. The Glossary's + ``name`` field is used to identify the Glossary to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [glossary, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryRequest): + request = business_glossary.UpdateGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("glossary.name", request.glossary.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_glossary(self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryRequest, dict]]): + The request object. Delete Glossary Request + name (:class:`str`): + Required. The name of the Glossary to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryRequest): + request = business_glossary.DeleteGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_glossary(self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: + r"""Gets a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryRequest, dict]]): + The request object. Get Glossary Request + name (:class:`str`): + Required. The name of the Glossary to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryRequest): + request = business_glossary.GetGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_glossaries(self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesAsyncPager: + r"""Lists Glossary resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossariesRequest, dict]]): + The request object. List Glossaries Request + parent (:class:`str`): + Required. The parent, which has this collection of + Glossaries. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager: + List Glossaries Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossariesRequest): + request = business_glossary.ListGlossariesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossaries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossariesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_glossary_category(self, + request: Optional[Union[business_glossary.CreateGlossaryCategoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Creates a new GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = await client.create_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest, dict]]): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + parent (:class:`str`): + Required. The parent resource where this + GlossaryCategory will be created. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category (:class:`google.cloud.dataplex_v1.types.GlossaryCategory`): + Required. The GlossaryCategory to + create. + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category_id (:class:`str`): + Required. GlossaryCategory + identifier. + + This corresponds to the ``category_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, category, category_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryCategoryRequest): + request = business_glossary.CreateGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if category is not None: + request.category = category + if category_id is not None: + request.category_id = category_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + async def update_glossary_category(self, + request: Optional[Union[business_glossary.UpdateGlossaryCategoryRequest, dict]] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Updates a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = await client.update_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest, dict]]): + The request object. Update GlossaryCategory Request + category (:class:`google.cloud.dataplex_v1.types.GlossaryCategory`): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify + the GlossaryCategory to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
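+ # For example (hypothetical field values), a caller could update only the + # display name of an existing category via the flattened arguments: + # mask = field_mask_pb2.FieldMask(paths=["display_name"]) + # await client.update_glossary_category(category=category, update_mask=mask)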
+ flattened_params = [category, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryCategoryRequest): + request = business_glossary.UpdateGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if category is not None: + request.category = category + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("category.name", request.category.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_glossary_category(self, + request: Optional[Union[business_glossary.DeleteGlossaryCategoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_category(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest, dict]]): + The request object. Delete GlossaryCategory Request + name (:class:`str`): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryCategoryRequest): + request = business_glossary.DeleteGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_glossary_category(self, + request: Optional[Union[business_glossary.GetGlossaryCategoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Gets a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest, dict]]): + The request object. Get GlossaryCategory Request + name (:class:`str`): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryCategoryRequest): + request = business_glossary.GetGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_glossary_categories(self, + request: Optional[Union[business_glossary.ListGlossaryCategoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesAsyncPager: + r"""Lists GlossaryCategory resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest, dict]]): + The request object. List GlossaryCategories Request + parent (:class:`str`): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager: + List GlossaryCategories Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryCategoriesRequest): + request = business_glossary.ListGlossaryCategoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossary_categories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
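+ # For example, the pager returned below can be consumed without any manual + # page handling (illustrative sketch only, hypothetical parent): + # pager = await client.list_glossary_categories(parent="projects/p/locations/l/glossaries/g") + # async for category in pager: + # print(category.name)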
+ response = pagers.ListGlossaryCategoriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_glossary_term(self, + request: Optional[Union[business_glossary.CreateGlossaryTermRequest, dict]] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Creates a new GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = await client.create_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryTermRequest, dict]]): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + parent (:class:`str`): + Required. The parent resource where the GlossaryTerm + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term (:class:`google.cloud.dataplex_v1.types.GlossaryTerm`): + Required. The GlossaryTerm to create. + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term_id (:class:`str`): + Required. GlossaryTerm identifier. + This corresponds to the ``term_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + a Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
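+ # For example (hypothetical identifiers), the flattened form below is + # equivalent to building a CreateGlossaryTermRequest by hand: + # term = dataplex_v1.GlossaryTerm(parent="projects/p/locations/l/glossaries/g") + # await client.create_glossary_term(parent=term.parent, term=term, term_id="revenue")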
+ flattened_params = [parent, term, term_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryTermRequest): + request = business_glossary.CreateGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if term is not None: + request.term = term + if term_id is not None: + request.term_id = term_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_glossary_term(self, + request: Optional[Union[business_glossary.UpdateGlossaryTermRequest, dict]] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Updates a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = await client.update_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest, dict]]): + The request object. Update GlossaryTerm Request + term (:class:`google.cloud.dataplex_v1.types.GlossaryTerm`): + Required. The GlossaryTerm to update. The GlossaryTerm's + ``name`` field is used to identify the GlossaryTerm to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + a Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [term, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryTermRequest): + request = business_glossary.UpdateGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if term is not None: + request.term = term + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("term.name", request.term.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_glossary_term(self, + request: Optional[Union[business_glossary.DeleteGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_term(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest, dict]]): + The request object. Delete GlossaryTerm Request + name (:class:`str`): + Required. The name of the GlossaryTerm to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryTermRequest): + request = business_glossary.DeleteGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_glossary_term(self, + request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Gets a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryTermRequest, dict]]): + The request object. Get GlossaryTerm Request + name (:class:`str`): + Required. The name of the GlossaryTerm to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + a Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryTermRequest): + request = business_glossary.GetGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + async def list_glossary_terms(self, + request: Optional[Union[business_glossary.ListGlossaryTermsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsAsyncPager: + r"""Lists GlossaryTerm resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossaryTermsRequest, dict]]): + The request object. List GlossaryTerms Request + parent (:class:`str`): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager: + List GlossaryTerms Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryTermsRequest): + request = business_glossary.ListGlossaryTermsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
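+ # The defaults can still be overridden per call, for example with a custom + # AsyncRetry (illustrative values only): + # from google.api_core import retry_async + # custom_retry = retry_async.AsyncRetry(initial=1.0, maximum=10.0) + # await client.list_glossary_terms(parent="...", retry=custom_retry)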
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossary_terms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossaryTermsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.locations_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.locations_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response.
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.locations_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.locations_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BusinessGlossaryServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "BusinessGlossaryServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py new file mode 100644 index 000000000000..7aa0a15b39b1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py @@ -0,0 +1,2770 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.types import business_glossary +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import BusinessGlossaryServiceGrpcTransport +from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport +from .transports.rest import BusinessGlossaryServiceRestTransport + + +class BusinessGlossaryServiceClientMeta(type): + """Metaclass for the BusinessGlossaryService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[BusinessGlossaryServiceTransport]] + _transport_registry["grpc"] = BusinessGlossaryServiceGrpcTransport + _transport_registry["grpc_asyncio"] = BusinessGlossaryServiceGrpcAsyncIOTransport + _transport_registry["rest"] = BusinessGlossaryServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[BusinessGlossaryServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
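+        # For example, get_transport_class("rest") returns
+        # BusinessGlossaryServiceRestTransport, while get_transport_class()
+        # with no label falls back to the first registered transport ("grpc").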
+ return next(iter(cls._transport_registry.values())) + + +class BusinessGlossaryServiceClient(metaclass=BusinessGlossaryServiceClientMeta): + """BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BusinessGlossaryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessGlossaryServiceTransport: The transport used by the client + instance.
+ """ + return self._transport + + @staticmethod + def glossary_path(project: str,location: str,glossary: str,) -> str: + """Returns a fully-qualified glossary string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) + + @staticmethod + def parse_glossary_path(path: str) -> Dict[str,str]: + """Parses a glossary path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def glossary_category_path(project: str,location: str,glossary: str,glossary_category: str,) -> str: + """Returns a fully-qualified glossary_category string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format(project=project, location=location, glossary=glossary, glossary_category=glossary_category, ) + + @staticmethod + def parse_glossary_category_path(path: str) -> Dict[str,str]: + """Parses a glossary_category path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/categories/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def glossary_term_path(project: str,location: str,glossary: str,glossary_term: str,) -> str: + """Returns a fully-qualified glossary_term string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format(project=project, location=location, glossary=glossary, glossary_term=glossary_term, ) + + @staticmethod + def parse_glossary_term_path(path: str) -> Dict[str,str]: + """Parses a glossary_term path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/terms/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client.
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
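+
+        For example (hypothetical values), a ``client_universe_domain`` of
+        "example.com" takes precedence over a ``universe_domain_env`` of
+        "googleapis.com"; if both are None, the default "googleapis.com"
+        is used.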
+ """ + universe_domain = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BusinessGlossaryServiceTransport, Callable[..., BusinessGlossaryServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business glossary service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessGlossaryServiceTransport,Callable[..., BusinessGlossaryServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessGlossaryServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BusinessGlossaryServiceClient._read_environment_variables() + self._client_cert_source = BusinessGlossaryServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = BusinessGlossaryServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BusinessGlossaryServiceTransport) + if transport_provided: + # transport is a BusinessGlossaryServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly."
+ ) + self._transport = cast(BusinessGlossaryServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + BusinessGlossaryServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[BusinessGlossaryServiceTransport], Callable[..., BusinessGlossaryServiceTransport]] = ( + BusinessGlossaryServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BusinessGlossaryServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "credentialsType": None, + } + ) + + def create_glossary(self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryRequest, dict]): + The request object. Create Glossary Request + parent (str): + Required. The parent resource where this Glossary will + be created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to create. + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_id (str): + Required. Glossary ID: Glossary + identifier. + + This corresponds to the ``glossary_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, glossary, glossary_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryRequest): + request = business_glossary.CreateGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
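+        # Equivalent calls (hypothetical values): the flattened form
+        #   client.create_glossary(parent="projects/p/locations/us-central1",
+        #                          glossary=glossary, glossary_id="g1")
+        # builds the same CreateGlossaryRequest as passing
+        #   request={"parent": "projects/p/locations/us-central1",
+        #            "glossary": glossary, "glossary_id": "g1"}
+        # by itself; supplying both raises the ValueError above.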
+ if parent is not None: + request.parent = parent + if glossary is not None: + request.glossary = glossary + if glossary_id is not None: + request.glossary_id = glossary_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_glossary(self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryRequest, dict]): + The request object. Update Glossary Request + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to update. The Glossary's + ``name`` field is used to identify the Glossary to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [glossary, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryRequest): + request = business_glossary.UpdateGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("glossary.name", request.glossary.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_glossary(self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryRequest, dict]): + The request object. Delete Glossary Request + name (str): + Required. The name of the Glossary to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryRequest): + request = business_glossary.DeleteGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
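+        # Typical use of the returned future (sketch, assumed variable names):
+        #   op = client.delete_glossary(name=glossary_name)
+        #   op.result()  # blocks until the LRO finishes; the result is Empty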
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_glossary(self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: + r"""Gets a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryRequest, dict]): + The request object. Get Glossary Request + name (str): + Required. The name of the Glossary to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryRequest): + request = business_glossary.GetGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
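+        # Note: the wrapped method carries the per-RPC retry/timeout defaults
+        # from the service config, so gapic_v1.method.DEFAULT resolves to
+        # those values unless the caller passes explicit ones.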
+ rpc = self._transport._wrapped_methods[self._transport.get_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossaries(self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesPager: + r"""Lists Glossary resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossariesRequest, dict]): + The request object. List Glossaries Request + parent (str): + Required. The parent, which has this collection of + Glossaries. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager: + List Glossaries Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
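+        # A dict is coerced through the proto-plus constructor below, so a
+        # (hypothetical) request={"parent": "projects/p/locations/l"} behaves
+        # the same as passing a ListGlossariesRequest instance.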
+ if not isinstance(request, business_glossary.ListGlossariesRequest): + request = business_glossary.ListGlossariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossaries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_glossary_category(self, + request: Optional[Union[business_glossary.CreateGlossaryCategoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Creates a new GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = client.create_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest, dict]): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + parent (str): + Required. The parent resource where this + GlossaryCategory will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``locationId`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to + create. + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category_id (str): + Required. GlossaryCategory + identifier. 
+ + This corresponds to the ``category_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, category, category_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryCategoryRequest): + request = business_glossary.CreateGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if category is not None: + request.category = category + if category_id is not None: + request.category_id = category_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_glossary_category(self, + request: Optional[Union[business_glossary.UpdateGlossaryCategoryRequest, dict]] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Updates a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = client.update_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest, dict]): + The request object. Update GlossaryCategory Request + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify + the GlossaryCategory to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [category, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryCategoryRequest): + request = business_glossary.UpdateGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if category is not None: + request.category = category + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. 
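+        # Routing note (hypothetical name): for a category named
+        # "projects/p/locations/l/glossaries/g/categories/c", the header below
+        # is keyed on the nested field path "category.name", not "name".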
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("category.name", request.category.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_glossary_category(self, + request: Optional[Union[business_glossary.DeleteGlossaryCategoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_category(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest, dict]): + The request object. Delete GlossaryCategory Request + name (str): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryCategoryRequest): + request = business_glossary.DeleteGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_glossary_category(self, + request: Optional[Union[business_glossary.GetGlossaryCategoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Gets a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest, dict]): + The request object. Get GlossaryCategory Request + name (str): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
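+        # A plain dict is accepted too and coerced below, so an equivalent
+        # call is (resource name illustrative):
+        #   client.get_glossary_category(
+        #       request={"name": "projects/my-proj/locations/us-central1/glossaries/my-glossary/categories/my-category"})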
+ if not isinstance(request, business_glossary.GetGlossaryCategoryRequest): + request = business_glossary.GetGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossary_categories(self, + request: Optional[Union[business_glossary.ListGlossaryCategoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesPager: + r"""Lists GlossaryCategory resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest, dict]): + The request object. List GlossaryCategories Request + parent (str): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + Location is the Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager: + List GlossaryCategories Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
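+        # Either the ``request`` object or the flattened ``parent`` argument
+        # may be supplied, never both; e.g. (illustrative):
+        #   client.list_glossary_categories(parent="projects/my-proj/locations/us-central1/glossaries/my-glossary")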
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryCategoriesRequest): + request = business_glossary.ListGlossaryCategoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossary_categories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossaryCategoriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_glossary_term(self, + request: Optional[Union[business_glossary.CreateGlossaryTermRequest, dict]] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Creates a new GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = client.create_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryTermRequest, dict]): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + parent (str): + Required. The parent resource where the GlossaryTerm + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. 
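+                For example (an illustrative value):
+                ``projects/my-proj/locations/us-central1/glossaries/my-glossary``.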
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to create. + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term_id (str): + Required. GlossaryTerm identifier. + This corresponds to the ``term_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, term, term_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryTermRequest): + request = business_glossary.CreateGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if term is not None: + request.term = term + if term_id is not None: + request.term_id = term_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_glossary_term(self, + request: Optional[Union[business_glossary.UpdateGlossaryTermRequest, dict]] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Updates a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = client.update_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest, dict]): + The request object. Update GlossaryTerm Request + term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to update. The GlossaryTerm's + ``name`` field is used to identify the GlossaryTerm to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [term, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryTermRequest): + request = business_glossary.UpdateGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if term is not None: + request.term = term + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("term.name", request.term.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
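+        # retry/timeout left at gapic_v1.method.DEFAULT fall back to the
+        # per-method defaults configured on the transport's wrapped method.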
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_glossary_term(self, + request: Optional[Union[business_glossary.DeleteGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_term(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest, dict]): + The request object. Delete GlossaryTerm Request + name (str): + Required. The name of the GlossaryTerm to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryTermRequest): + request = business_glossary.DeleteGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
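+        # The delete RPC yields an empty response, so nothing is returned
+        # to the caller.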
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_glossary_term(self, + request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Gets a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryTermRequest, dict]): + The request object. Get GlossaryTerm Request + name (str): + Required. The name of the GlossaryTerm to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryTermRequest): + request = business_glossary.GetGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossary_terms(self, + request: Optional[Union[business_glossary.ListGlossaryTermsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsPager: + r"""Lists GlossaryTerm resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossaryTermsRequest, dict]): + The request object. List GlossaryTerms Request + parent (str): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager: + List GlossaryTerms Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, business_glossary.ListGlossaryTermsRequest): + request = business_glossary.ListGlossaryTermsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossary_terms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossaryTermsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BusinessGlossaryServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
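+        # e.g. a dict such as (illustrative)
+        #   {"name": "projects/my-proj/locations/us-central1"}
+        # is expanded into the proto below.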
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "BusinessGlossaryServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py new file mode 100644 index 000000000000..7d055c9b172a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import business_glossary + + +class ListGlossariesPager: + """A pager for iterating through ``list_glossaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``glossaries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaries`` requests and continue to iterate + through the ``glossaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., business_glossary.ListGlossariesResponse], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossariesRequest): + The initial request object. 
+ response (google.cloud.dataplex_v1.types.ListGlossariesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = business_glossary.ListGlossariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.Glossary]: + for page in self.pages: + yield from page.glossaries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGlossariesAsyncPager: + """A pager for iterating through ``list_glossaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``glossaries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaries`` requests and continue to iterate + through the ``glossaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[business_glossary.ListGlossariesResponse]], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossariesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossariesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
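+
+        A minimal usage sketch, assuming an async client (resource names
+        are illustrative):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            async def sample_iterate_glossaries():
+                client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+                request = dataplex_v1.ListGlossariesRequest(
+                    parent="projects/my-proj/locations/us-central1",
+                )
+                pager = await client.list_glossaries(request=request)
+                async for glossary in pager:
+                    print(glossary.name)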
+ """ + self._method = method + self._request = business_glossary.ListGlossariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[business_glossary.ListGlossariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.Glossary]: + async def async_generator(): + async for page in self.pages: + for response in page.glossaries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGlossaryCategoriesPager: + """A pager for iterating through ``list_glossary_categories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``categories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaryCategories`` requests and continue to iterate + through the ``categories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., business_glossary.ListGlossaryCategoriesResponse], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryCategoriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossaryCategoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.GlossaryCategory]: + for page in self.pages: + yield from page.categories + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGlossaryCategoriesAsyncPager: + """A pager for iterating through ``list_glossary_categories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``categories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaryCategories`` requests and continue to iterate + through the ``categories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[business_glossary.ListGlossaryCategoriesResponse]], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryCategoriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[business_glossary.ListGlossaryCategoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryCategory]: + async def async_generator(): + async for page in self.pages: + for response in page.categories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGlossaryTermsPager: + """A pager for iterating through ``list_glossary_terms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``terms`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaryTerms`` requests and continue to iterate + through the ``terms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., business_glossary.ListGlossaryTermsResponse], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryTermsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryTermsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryTermsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossaryTermsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.GlossaryTerm]: + for page in self.pages: + yield from page.terms + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGlossaryTermsAsyncPager: + """A pager for iterating through ``list_glossary_terms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``terms`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaryTerms`` requests and continue to iterate + through the ``terms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[business_glossary.ListGlossaryTermsResponse]], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryTermsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryTermsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryTermsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[business_glossary.ListGlossaryTermsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryTerm]: + async def async_generator(): + async for page in self.pages: + for response in page.terms: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst new file mode 100644 index 000000000000..2b7007a38bee --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`BusinessGlossaryServiceTransport` is the ABC for all transports. +- public child `BusinessGlossaryServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `BusinessGlossaryServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseBusinessGlossaryServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `BusinessGlossaryServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py new file mode 100644 index 000000000000..552b11ee5625 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BusinessGlossaryServiceTransport +from .grpc import BusinessGlossaryServiceGrpcTransport +from .grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport +from .rest import BusinessGlossaryServiceRestTransport +from .rest import BusinessGlossaryServiceRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[BusinessGlossaryServiceTransport]] +_transport_registry['grpc'] = BusinessGlossaryServiceGrpcTransport +_transport_registry['grpc_asyncio'] = BusinessGlossaryServiceGrpcAsyncIOTransport +_transport_registry['rest'] = BusinessGlossaryServiceRestTransport + +__all__ = ( + 'BusinessGlossaryServiceTransport', + 'BusinessGlossaryServiceGrpcTransport', + 'BusinessGlossaryServiceGrpcAsyncIOTransport', + 'BusinessGlossaryServiceRestTransport', + 'BusinessGlossaryServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py new file mode 100644 index 000000000000..4eab8f48345e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
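
# A sketch of how the ``_transport_registry`` above resolves a transport label
# to a class; the generated client performs an equivalent lookup internally,
# and the helper name here is hypothetical, not part of the library surface.
def pick_transport(label: str = "grpc"):
    from google.cloud.dataplex_v1.services.business_glossary_service import transports
    # Raises KeyError for labels the registry does not define (e.g. "rest_asyncio").
    return transports._transport_registry[label]
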
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import business_glossary +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BusinessGlossaryServiceTransport(abc.ABC): + """Abstract transport class for BusinessGlossaryService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
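        # Resolution order: explicit ``credentials`` take precedence; otherwise
        # ``credentials_file`` is loaded with
        # google.auth.load_credentials_from_file; otherwise Application Default
        # Credentials are resolved via google.auth.default(). Passing both
        # ``credentials`` and ``credentials_file`` raises
        # DuplicateCredentialArgs below.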
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_glossary: gapic_v1.method.wrap_method( + self.create_glossary, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary: gapic_v1.method.wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary: gapic_v1.method.wrap_method( + self.delete_glossary, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary: gapic_v1.method.wrap_method( + self.get_glossary, + default_timeout=None, + client_info=client_info, + ), + self.list_glossaries: gapic_v1.method.wrap_method( + self.list_glossaries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_category: gapic_v1.method.wrap_method( + self.create_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_category: gapic_v1.method.wrap_method( + self.update_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_category: gapic_v1.method.wrap_method( + self.delete_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_category: gapic_v1.method.wrap_method( + self.get_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_categories: gapic_v1.method.wrap_method( + self.list_glossary_categories, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_term: gapic_v1.method.wrap_method( + self.create_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_term: gapic_v1.method.wrap_method( + self.update_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_term: gapic_v1.method.wrap_method( + self.delete_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_term: gapic_v1.method.wrap_method( + self.get_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_terms: gapic_v1.method.wrap_method( + self.list_glossary_terms, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + 
default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_glossary(self) -> Callable[ + [business_glossary.CreateGlossaryRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_glossary(self) -> Callable[ + [business_glossary.UpdateGlossaryRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_glossary(self) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_glossary(self) -> Callable[ + [business_glossary.GetGlossaryRequest], + Union[ + business_glossary.Glossary, + Awaitable[business_glossary.Glossary] + ]]: + raise NotImplementedError() + + @property + def list_glossaries(self) -> Callable[ + [business_glossary.ListGlossariesRequest], + Union[ + business_glossary.ListGlossariesResponse, + Awaitable[business_glossary.ListGlossariesResponse] + ]]: + raise NotImplementedError() + + @property + def create_glossary_category(self) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory] + ]]: + raise NotImplementedError() + + @property + def update_glossary_category(self) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory] + ]]: + raise NotImplementedError() + + @property + def delete_glossary_category(self) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_glossary_category(self) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory] + ]]: + raise NotImplementedError() + + @property + def list_glossary_categories(self) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Union[ + business_glossary.ListGlossaryCategoriesResponse, + Awaitable[business_glossary.ListGlossaryCategoriesResponse] + ]]: + raise NotImplementedError() + + @property + def create_glossary_term(self) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Union[ + 
business_glossary.GlossaryTerm, + Awaitable[business_glossary.GlossaryTerm] + ]]: + raise NotImplementedError() + + @property + def update_glossary_term(self) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, + Awaitable[business_glossary.GlossaryTerm] + ]]: + raise NotImplementedError() + + @property + def delete_glossary_term(self) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_glossary_term(self) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, + Awaitable[business_glossary.GlossaryTerm] + ]]: + raise NotImplementedError() + + @property + def list_glossary_terms(self) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Union[ + business_glossary.ListGlossaryTermsResponse, + Awaitable[business_glossary.ListGlossaryTermsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'BusinessGlossaryServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py new file mode 100644 index 000000000000..aa49382c72e8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py @@ -0,0 +1,852 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
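
# The abstract transport above types every RPC property as
# Callable[[Request], Union[Response, Awaitable[Response]]] so a single ABC can
# describe both the sync and async transports; each concrete transport then
# narrows the return type. A self-contained sketch of that pattern, with
# illustrative names only:
import abc
from typing import Awaitable, Callable, Union

class _AbstractTransport(abc.ABC):
    @property
    def echo(self) -> Callable[[str], Union[str, Awaitable[str]]]:
        raise NotImplementedError()

class _SyncTransport(_AbstractTransport):
    @property
    def echo(self) -> Callable[[str], str]:
        return lambda text: text  # the sync implementation narrows the Union
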
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.dataplex_v1.types import business_glossary
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str -> str.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class BusinessGlossaryServiceGrpcTransport(BusinessGlossaryServiceTransport):
+    """gRPC backend transport for BusinessGlossaryService.
+
+    BusinessGlossaryService provides APIs for managing business
+    glossary resources for enterprise customers.
+    The resources currently supported in Business Glossary are:
+
+    1. Glossary
+    2. GlossaryCategory
+    3. GlossaryTerm
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(
+                self._logged_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_glossary(self) -> Callable[
+            [business_glossary.CreateGlossaryRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create glossary method over gRPC.
+
+        Creates a new Glossary resource.
+
+        Returns:
+            Callable[[~.CreateGlossaryRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_glossary' not in self._stubs:
+            self._stubs['create_glossary'] = self._logged_channel.unary_unary(
+                '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary',
+                request_serializer=business_glossary.CreateGlossaryRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_glossary']
+
+    @property
+    def update_glossary(self) -> Callable[
+            [business_glossary.UpdateGlossaryRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update glossary method over gRPC.
+
+        Updates a Glossary resource.
+
+        Returns:
+            Callable[[~.UpdateGlossaryRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
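        # The memoization below is the pattern every stub property in this
        # class follows: the multicallable is created once against the logged
        # channel, cached in ``self._stubs``, and returned on later accesses.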
+ if 'update_glossary' not in self._stubs: + self._stubs['update_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary', + request_serializer=business_glossary.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_glossary'] + + @property + def delete_glossary(self) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete glossary method over gRPC. + + Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + Returns: + Callable[[~.DeleteGlossaryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary' not in self._stubs: + self._stubs['delete_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary', + request_serializer=business_glossary.DeleteGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_glossary'] + + @property + def get_glossary(self) -> Callable[ + [business_glossary.GetGlossaryRequest], + business_glossary.Glossary]: + r"""Return a callable for the get glossary method over gRPC. + + Gets a Glossary resource. + + Returns: + Callable[[~.GetGlossaryRequest], + ~.Glossary]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_glossary' not in self._stubs: + self._stubs['get_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary', + request_serializer=business_glossary.GetGlossaryRequest.serialize, + response_deserializer=business_glossary.Glossary.deserialize, + ) + return self._stubs['get_glossary'] + + @property + def list_glossaries(self) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse]: + r"""Return a callable for the list glossaries method over gRPC. + + Lists Glossary resources in a project and location. + + Returns: + Callable[[~.ListGlossariesRequest], + ~.ListGlossariesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_glossaries' not in self._stubs: + self._stubs['list_glossaries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries', + request_serializer=business_glossary.ListGlossariesRequest.serialize, + response_deserializer=business_glossary.ListGlossariesResponse.deserialize, + ) + return self._stubs['list_glossaries'] + + @property + def create_glossary_category(self) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + r"""Return a callable for the create glossary category method over gRPC. 
+ + Creates a new GlossaryCategory resource. + + Returns: + Callable[[~.CreateGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_glossary_category' not in self._stubs: + self._stubs['create_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory', + request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['create_glossary_category'] + + @property + def update_glossary_category(self) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + r"""Return a callable for the update glossary category method over gRPC. + + Updates a GlossaryCategory resource. + + Returns: + Callable[[~.UpdateGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_glossary_category' not in self._stubs: + self._stubs['update_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory', + request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['update_glossary_category'] + + @property + def delete_glossary_category(self) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete glossary category method over gRPC. + + Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + Returns: + Callable[[~.DeleteGlossaryCategoryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary_category' not in self._stubs: + self._stubs['delete_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory', + request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_glossary_category'] + + @property + def get_glossary_category(self) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + r"""Return a callable for the get glossary category method over gRPC. + + Gets a GlossaryCategory resource. + + Returns: + Callable[[~.GetGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_glossary_category' not in self._stubs: + self._stubs['get_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory', + request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['get_glossary_category'] + + @property + def list_glossary_categories(self) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse]: + r"""Return a callable for the list glossary categories method over gRPC. + + Lists GlossaryCategory resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryCategoriesRequest], + ~.ListGlossaryCategoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_glossary_categories' not in self._stubs: + self._stubs['list_glossary_categories'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories', + request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, + response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, + ) + return self._stubs['list_glossary_categories'] + + @property + def create_glossary_term(self) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + business_glossary.GlossaryTerm]: + r"""Return a callable for the create glossary term method over gRPC. + + Creates a new GlossaryTerm resource. + + Returns: + Callable[[~.CreateGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_glossary_term' not in self._stubs: + self._stubs['create_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm', + request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['create_glossary_term'] + + @property + def update_glossary_term(self) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + business_glossary.GlossaryTerm]: + r"""Return a callable for the update glossary term method over gRPC. + + Updates a GlossaryTerm resource. + + Returns: + Callable[[~.UpdateGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
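        # As elsewhere in this class, the request side uses the proto-plus
        # ``serialize`` hook on the request type, while responses use either
        # the proto-plus ``deserialize`` hook or, for google.protobuf.Empty
        # replies, the raw ``FromString`` classmethod.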
+ if 'update_glossary_term' not in self._stubs: + self._stubs['update_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm', + request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['update_glossary_term'] + + @property + def delete_glossary_term(self) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete glossary term method over gRPC. + + Deletes a GlossaryTerm resource. + + Returns: + Callable[[~.DeleteGlossaryTermRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary_term' not in self._stubs: + self._stubs['delete_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm', + request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_glossary_term'] + + @property + def get_glossary_term(self) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + business_glossary.GlossaryTerm]: + r"""Return a callable for the get glossary term method over gRPC. + + Gets a GlossaryTerm resource. + + Returns: + Callable[[~.GetGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_glossary_term' not in self._stubs: + self._stubs['get_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm', + request_serializer=business_glossary.GetGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['get_glossary_term'] + + @property + def list_glossary_terms(self) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse]: + r"""Return a callable for the list glossary terms method over gRPC. + + Lists GlossaryTerm resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryTermsRequest], + ~.ListGlossaryTermsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
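        # The first argument to ``unary_unary`` below is the full gRPC method
        # path in the form "/<package>.<Service>/<Method>".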
+ if 'list_glossary_terms' not in self._stubs: + self._stubs['list_glossary_terms'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms', + request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, + response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, + ) + return self._stubs['list_glossary_terms'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'BusinessGlossaryServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2493fc37b685 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py @@ -0,0 +1,973 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import business_glossary
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import BusinessGlossaryServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str -> str.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService",
+                    "rpcName": str(client_call_details.method),
"response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class BusinessGlossaryServiceGrpcAsyncIOTransport(BusinessGlossaryServiceTransport): + """gRPC AsyncIO backend transport for BusinessGlossaryService. + + BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the gRPC channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
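+ # Editorial sketch (hedged): the branch below only requires that
+ # ``client_cert_source`` return ``(certificate_chain_pem, private_key_pem)``
+ # as bytes. A minimal callback, with hypothetical file names, could be:
+ #
+ #   def my_client_cert_source():
+ #       with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
+ #           return crt.read(), key.read()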
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_glossary(self) -> Callable[ + [business_glossary.CreateGlossaryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create glossary method over gRPC. + + Creates a new Glossary resource. + + Returns: + Callable[[~.CreateGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
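+ # Editorial sketch (hedged): callers normally reach this stub through the
+ # generated async client rather than the transport directly. Assuming the
+ # standard GAPIC surface (``BusinessGlossaryServiceAsyncClient``), a
+ # create-and-wait flow looks roughly like:
+ #
+ #   from google.cloud import dataplex_v1
+ #   client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+ #   operation = await client.create_glossary(
+ #       request=dataplex_v1.CreateGlossaryRequest(
+ #           parent="projects/my-project/locations/us-central1"))  # illustrative name
+ #   glossary = await operation.result()  # resolves the long-running operation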
+ if 'create_glossary' not in self._stubs: + self._stubs['create_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary', + request_serializer=business_glossary.CreateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_glossary'] + + @property + def update_glossary(self) -> Callable[ + [business_glossary.UpdateGlossaryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update glossary method over gRPC. + + Updates a Glossary resource. + + Returns: + Callable[[~.UpdateGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_glossary' not in self._stubs: + self._stubs['update_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary', + request_serializer=business_glossary.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_glossary'] + + @property + def delete_glossary(self) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete glossary method over gRPC. + + Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + Returns: + Callable[[~.DeleteGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary' not in self._stubs: + self._stubs['delete_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary', + request_serializer=business_glossary.DeleteGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_glossary'] + + @property + def get_glossary(self) -> Callable[ + [business_glossary.GetGlossaryRequest], + Awaitable[business_glossary.Glossary]]: + r"""Return a callable for the get glossary method over gRPC. + + Gets a Glossary resource. + + Returns: + Callable[[~.GetGlossaryRequest], + Awaitable[~.Glossary]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_glossary' not in self._stubs: + self._stubs['get_glossary'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary', + request_serializer=business_glossary.GetGlossaryRequest.serialize, + response_deserializer=business_glossary.Glossary.deserialize, + ) + return self._stubs['get_glossary'] + + @property + def list_glossaries(self) -> Callable[ + [business_glossary.ListGlossariesRequest], + Awaitable[business_glossary.ListGlossariesResponse]]: + r"""Return a callable for the list glossaries method over gRPC. 
+ + Lists Glossary resources in a project and location. + + Returns: + Callable[[~.ListGlossariesRequest], + Awaitable[~.ListGlossariesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_glossaries' not in self._stubs: + self._stubs['list_glossaries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries', + request_serializer=business_glossary.ListGlossariesRequest.serialize, + response_deserializer=business_glossary.ListGlossariesResponse.deserialize, + ) + return self._stubs['list_glossaries'] + + @property + def create_glossary_category(self) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory]]: + r"""Return a callable for the create glossary category method over gRPC. + + Creates a new GlossaryCategory resource. + + Returns: + Callable[[~.CreateGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_glossary_category' not in self._stubs: + self._stubs['create_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory', + request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['create_glossary_category'] + + @property + def update_glossary_category(self) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory]]: + r"""Return a callable for the update glossary category method over gRPC. + + Updates a GlossaryCategory resource. + + Returns: + Callable[[~.UpdateGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_glossary_category' not in self._stubs: + self._stubs['update_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory', + request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['update_glossary_category'] + + @property + def delete_glossary_category(self) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete glossary category method over gRPC. + + Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + Returns: + Callable[[~.DeleteGlossaryCategoryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary_category' not in self._stubs: + self._stubs['delete_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory', + request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_glossary_category'] + + @property + def get_glossary_category(self) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory]]: + r"""Return a callable for the get glossary category method over gRPC. + + Gets a GlossaryCategory resource. + + Returns: + Callable[[~.GetGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_glossary_category' not in self._stubs: + self._stubs['get_glossary_category'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory', + request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs['get_glossary_category'] + + @property + def list_glossary_categories(self) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Awaitable[business_glossary.ListGlossaryCategoriesResponse]]: + r"""Return a callable for the list glossary categories method over gRPC. + + Lists GlossaryCategory resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryCategoriesRequest], + Awaitable[~.ListGlossaryCategoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_glossary_categories' not in self._stubs: + self._stubs['list_glossary_categories'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories', + request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, + response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, + ) + return self._stubs['list_glossary_categories'] + + @property + def create_glossary_term(self) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm]]: + r"""Return a callable for the create glossary term method over gRPC. + + Creates a new GlossaryTerm resource. + + Returns: + Callable[[~.CreateGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
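+ # Editorial sketch (hedged): each property here returns an awaitable stub;
+ # given a ``transport`` instance of this class, direct use looks like:
+ #
+ #   rpc = transport.create_glossary_term
+ #   term = await rpc(business_glossary.CreateGlossaryTermRequest(
+ #       parent="projects/my-project/locations/us-central1/glossaries/my-glossary"))  # illustrative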
+ if 'create_glossary_term' not in self._stubs: + self._stubs['create_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm', + request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['create_glossary_term'] + + @property + def update_glossary_term(self) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm]]: + r"""Return a callable for the update glossary term method over gRPC. + + Updates a GlossaryTerm resource. + + Returns: + Callable[[~.UpdateGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_glossary_term' not in self._stubs: + self._stubs['update_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm', + request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['update_glossary_term'] + + @property + def delete_glossary_term(self) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete glossary term method over gRPC. + + Deletes a GlossaryTerm resource. + + Returns: + Callable[[~.DeleteGlossaryTermRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_glossary_term' not in self._stubs: + self._stubs['delete_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm', + request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_glossary_term'] + + @property + def get_glossary_term(self) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm]]: + r"""Return a callable for the get glossary term method over gRPC. + + Gets a GlossaryTerm resource. + + Returns: + Callable[[~.GetGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_glossary_term' not in self._stubs: + self._stubs['get_glossary_term'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm', + request_serializer=business_glossary.GetGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs['get_glossary_term'] + + @property + def list_glossary_terms(self) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Awaitable[business_glossary.ListGlossaryTermsResponse]]: + r"""Return a callable for the list glossary terms method over gRPC. + + Lists GlossaryTerm resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryTermsRequest], + Awaitable[~.ListGlossaryTermsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_glossary_terms' not in self._stubs: + self._stubs['list_glossary_terms'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms', + request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, + response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, + ) + return self._stubs['list_glossary_terms'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_glossary: self._wrap_method( + self.create_glossary, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary: self._wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary: self._wrap_method( + self.delete_glossary, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary: self._wrap_method( + self.get_glossary, + default_timeout=None, + client_info=client_info, + ), + self.list_glossaries: self._wrap_method( + self.list_glossaries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_category: self._wrap_method( + self.create_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_category: self._wrap_method( + self.update_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_category: self._wrap_method( + self.delete_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_category: self._wrap_method( + self.get_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_categories: self._wrap_method( + self.list_glossary_categories, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_term: self._wrap_method( + self.create_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_term: self._wrap_method( + self.update_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_term: self._wrap_method( + self.delete_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_term: self._wrap_method( + self.get_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_terms: self._wrap_method( + self.list_glossary_terms, + default_timeout=None, + 
client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
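+ # Editorial sketch (hedged): these Operations stubs back the standard
+ # long-running-operations mixin, so a pending glossary operation can be
+ # polled with something like:
+ #
+ #   from google.longrunning import operations_pb2
+ #   op = await client.get_operation(
+ #       operations_pb2.GetOperationRequest(name=operation_name))  # name is illustrative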
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'BusinessGlossaryServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py new file mode 100644 index 000000000000..babf4e3a4beb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py @@ -0,0 +1,3535 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
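+# Editorial sketch (hedged): the REST transport defined in this module is
+# usually selected through the client constructor rather than instantiated
+# directly; assuming the standard generated surface:
+#
+#   from google.cloud import dataplex_v1
+#   client = dataplex_v1.BusinessGlossaryServiceClient(transport="rest")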
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import business_glossary +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseBusinessGlossaryServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BusinessGlossaryServiceRestInterceptor: + """Interceptor for BusinessGlossaryService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BusinessGlossaryServiceRestTransport. + + .. code-block:: python + class MyCustomBusinessGlossaryServiceInterceptor(BusinessGlossaryServiceRestInterceptor): + def pre_create_glossary(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_glossary(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_create_glossary_category(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_glossary_category(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_create_glossary_term(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_glossary_term(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_delete_glossary(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_delete_glossary(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_delete_glossary_category(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_delete_glossary_term(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_get_glossary(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_glossary(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_glossary_category(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_glossary_category(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_glossary_term(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_glossary_term(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_glossaries(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_glossaries(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_glossary_categories(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_glossary_categories(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_glossary_terms(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_glossary_terms(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_glossary(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_glossary(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_glossary_category(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_glossary_category(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_glossary_term(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_glossary_term(self, response): + logging.info(f"Received response: 
{response}") + return response + + transport = BusinessGlossaryServiceRestTransport(interceptor=MyCustomBusinessGlossaryServiceInterceptor()) + client = BusinessGlossaryServiceClient(transport=transport) + + + """ + def pre_create_glossary(self, request: business_glossary.CreateGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_glossary + + DEPRECATED. Please use the `post_create_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_create_glossary` interceptor runs + before the `post_create_glossary_with_metadata` interceptor. + """ + return response + + def post_create_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_with_metadata` + interceptor in new development instead of the `post_create_glossary` interceptor. + When both interceptors are used, this `post_create_glossary_with_metadata` interceptor runs after the + `post_create_glossary` interceptor. The (possibly modified) response returned by + `post_create_glossary` will be passed to + `post_create_glossary_with_metadata`. + """ + return response, metadata + + def pre_create_glossary_category(self, request: business_glossary.CreateGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for create_glossary_category + + DEPRECATED. Please use the `post_create_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_create_glossary_category` interceptor runs + before the `post_create_glossary_category_with_metadata` interceptor. 
+ """ + return response + + def post_create_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_category_with_metadata` + interceptor in new development instead of the `post_create_glossary_category` interceptor. + When both interceptors are used, this `post_create_glossary_category_with_metadata` interceptor runs after the + `post_create_glossary_category` interceptor. The (possibly modified) response returned by + `post_create_glossary_category` will be passed to + `post_create_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_create_glossary_term(self, request: business_glossary.CreateGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for create_glossary_term + + DEPRECATED. Please use the `post_create_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_create_glossary_term` interceptor runs + before the `post_create_glossary_term_with_metadata` interceptor. + """ + return response + + def post_create_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_term_with_metadata` + interceptor in new development instead of the `post_create_glossary_term` interceptor. + When both interceptors are used, this `post_create_glossary_term_with_metadata` interceptor runs after the + `post_create_glossary_term` interceptor. The (possibly modified) response returned by + `post_create_glossary_term` will be passed to + `post_create_glossary_term_with_metadata`. + """ + return response, metadata + + def pre_delete_glossary(self, request: business_glossary.DeleteGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. 
+ """ + return request, metadata + + def post_delete_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_glossary + + DEPRECATED. Please use the `post_delete_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_delete_glossary` interceptor runs + before the `post_delete_glossary_with_metadata` interceptor. + """ + return response + + def post_delete_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_delete_glossary_with_metadata` + interceptor in new development instead of the `post_delete_glossary` interceptor. + When both interceptors are used, this `post_delete_glossary_with_metadata` interceptor runs after the + `post_delete_glossary` interceptor. The (possibly modified) response returned by + `post_delete_glossary` will be passed to + `post_delete_glossary_with_metadata`. + """ + return response, metadata + + def pre_delete_glossary_category(self, request: business_glossary.DeleteGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def pre_delete_glossary_term(self, request: business_glossary.DeleteGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def pre_get_glossary(self, request: business_glossary.GetGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary(self, response: business_glossary.Glossary) -> business_glossary.Glossary: + """Post-rpc interceptor for get_glossary + + DEPRECATED. Please use the `post_get_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary` interceptor runs + before the `post_get_glossary_with_metadata` interceptor. 
+ """ + return response + + def post_get_glossary_with_metadata(self, response: business_glossary.Glossary, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.Glossary, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_with_metadata` + interceptor in new development instead of the `post_get_glossary` interceptor. + When both interceptors are used, this `post_get_glossary_with_metadata` interceptor runs after the + `post_get_glossary` interceptor. The (possibly modified) response returned by + `post_get_glossary` will be passed to + `post_get_glossary_with_metadata`. + """ + return response, metadata + + def pre_get_glossary_category(self, request: business_glossary.GetGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for get_glossary_category + + DEPRECATED. Please use the `post_get_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary_category` interceptor runs + before the `post_get_glossary_category_with_metadata` interceptor. + """ + return response + + def post_get_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_category_with_metadata` + interceptor in new development instead of the `post_get_glossary_category` interceptor. + When both interceptors are used, this `post_get_glossary_category_with_metadata` interceptor runs after the + `post_get_glossary_category` interceptor. The (possibly modified) response returned by + `post_get_glossary_category` will be passed to + `post_get_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_get_glossary_term(self, request: business_glossary.GetGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for get_glossary_term + + DEPRECATED. 
Please use the `post_get_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary_term` interceptor runs + before the `post_get_glossary_term_with_metadata` interceptor. + """ + return response + + def post_get_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_term_with_metadata` + interceptor in new development instead of the `post_get_glossary_term` interceptor. + When both interceptors are used, this `post_get_glossary_term_with_metadata` interceptor runs after the + `post_get_glossary_term` interceptor. The (possibly modified) response returned by + `post_get_glossary_term` will be passed to + `post_get_glossary_term_with_metadata`. + """ + return response, metadata + + def pre_list_glossaries(self, request: business_glossary.ListGlossariesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossariesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_glossaries + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossaries(self, response: business_glossary.ListGlossariesResponse) -> business_glossary.ListGlossariesResponse: + """Post-rpc interceptor for list_glossaries + + DEPRECATED. Please use the `post_list_glossaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossaries` interceptor runs + before the `post_list_glossaries_with_metadata` interceptor. + """ + return response + + def post_list_glossaries_with_metadata(self, response: business_glossary.ListGlossariesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossariesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_glossaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_list_glossaries_with_metadata` + interceptor in new development instead of the `post_list_glossaries` interceptor. + When both interceptors are used, this `post_list_glossaries_with_metadata` interceptor runs after the + `post_list_glossaries` interceptor. The (possibly modified) response returned by + `post_list_glossaries` will be passed to + `post_list_glossaries_with_metadata`. 
+ """ + return response, metadata + + def pre_list_glossary_categories(self, request: business_glossary.ListGlossaryCategoriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryCategoriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_glossary_categories + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossary_categories(self, response: business_glossary.ListGlossaryCategoriesResponse) -> business_glossary.ListGlossaryCategoriesResponse: + """Post-rpc interceptor for list_glossary_categories + + DEPRECATED. Please use the `post_list_glossary_categories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossary_categories` interceptor runs + before the `post_list_glossary_categories_with_metadata` interceptor. + """ + return response + + def post_list_glossary_categories_with_metadata(self, response: business_glossary.ListGlossaryCategoriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryCategoriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_glossary_categories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_list_glossary_categories_with_metadata` + interceptor in new development instead of the `post_list_glossary_categories` interceptor. + When both interceptors are used, this `post_list_glossary_categories_with_metadata` interceptor runs after the + `post_list_glossary_categories` interceptor. The (possibly modified) response returned by + `post_list_glossary_categories` will be passed to + `post_list_glossary_categories_with_metadata`. + """ + return response, metadata + + def pre_list_glossary_terms(self, request: business_glossary.ListGlossaryTermsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryTermsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_glossary_terms + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossary_terms(self, response: business_glossary.ListGlossaryTermsResponse) -> business_glossary.ListGlossaryTermsResponse: + """Post-rpc interceptor for list_glossary_terms + + DEPRECATED. Please use the `post_list_glossary_terms_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossary_terms` interceptor runs + before the `post_list_glossary_terms_with_metadata` interceptor. 
+ """ + return response + + def post_list_glossary_terms_with_metadata(self, response: business_glossary.ListGlossaryTermsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryTermsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_glossary_terms + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_list_glossary_terms_with_metadata` + interceptor in new development instead of the `post_list_glossary_terms` interceptor. + When both interceptors are used, this `post_list_glossary_terms_with_metadata` interceptor runs after the + `post_list_glossary_terms` interceptor. The (possibly modified) response returned by + `post_list_glossary_terms` will be passed to + `post_list_glossary_terms_with_metadata`. + """ + return response, metadata + + def pre_update_glossary(self, request: business_glossary.UpdateGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_glossary + + DEPRECATED. Please use the `post_update_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_update_glossary` interceptor runs + before the `post_update_glossary_with_metadata` interceptor. + """ + return response + + def post_update_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_update_glossary_with_metadata` + interceptor in new development instead of the `post_update_glossary` interceptor. + When both interceptors are used, this `post_update_glossary_with_metadata` interceptor runs after the + `post_update_glossary` interceptor. The (possibly modified) response returned by + `post_update_glossary` will be passed to + `post_update_glossary_with_metadata`. + """ + return response, metadata + + def pre_update_glossary_category(self, request: business_glossary.UpdateGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for update_glossary_category + + DEPRECATED. 
Please use the `post_update_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_update_glossary_category` interceptor runs + before the `post_update_glossary_category_with_metadata` interceptor. + """ + return response + + def post_update_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_update_glossary_category_with_metadata` + interceptor in new development instead of the `post_update_glossary_category` interceptor. + When both interceptors are used, this `post_update_glossary_category_with_metadata` interceptor runs after the + `post_update_glossary_category` interceptor. The (possibly modified) response returned by + `post_update_glossary_category` will be passed to + `post_update_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_update_glossary_term(self, request: business_glossary.UpdateGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for update_glossary_term + + DEPRECATED. Please use the `post_update_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_update_glossary_term` interceptor runs + before the `post_update_glossary_term_with_metadata` interceptor. + """ + return response + + def post_update_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_update_glossary_term_with_metadata` + interceptor in new development instead of the `post_update_glossary_term` interceptor. + When both interceptors are used, this `post_update_glossary_term_with_metadata` interceptor runs after the + `post_update_glossary_term` interceptor. The (possibly modified) response returned by + `post_update_glossary_term` will be passed to + `post_update_glossary_term_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BusinessGlossaryServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BusinessGlossaryServiceRestInterceptor + + +class BusinessGlossaryServiceRestTransport(_BaseBusinessGlossaryServiceRestTransport): + """REST backend synchronous transport for BusinessGlossaryService. + + BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[BusinessGlossaryServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BusinessGlossaryServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. 
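# [Editor's note] Illustrative sketch, not part of the generated patch: the
# cached AbstractOperationsClient returned below can be used to poll
# long-running operations produced by RPCs such as CreateGlossary. The
# resource names here are placeholders for illustration only.
#
#   op = transport.operations_client.get_operation(
#       name="projects/my-project/locations/us-central1/operations/op-123")
#   if op.done:
#       ...  # inspect op.response (packed result) or op.error (rpc status)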
+ return self._operations_client + + class _CreateGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: business_glossary.CreateGlossaryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create glossary method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryRequest): + The request object. Create Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_http_options() + + request, metadata = self._interceptor.pre_create_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._CreateGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: business_glossary.CreateGlossaryCategoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryCategory: + r"""Call the create glossary category method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryCategoryRequest): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. 
+ + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_http_options() + + request, metadata = self._interceptor.pre_create_glossary_category(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryCategory", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._CreateGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_category_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: business_glossary.CreateGlossaryTermRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryTerm: + r"""Call the create glossary term method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryTermRequest): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_http_options() + + request, metadata = self._interceptor.pre_create_glossary_term(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryTerm", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._CreateGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_term_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.DeleteGlossaryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete glossary method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryRequest): + The request object. Delete Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_http_options() + + request, metadata = self._interceptor.pre_delete_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._DeleteGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_glossary_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.DeleteGlossaryCategoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + 
): + r"""Call the delete glossary category method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryCategoryRequest): + The request object. Delete GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_http_options() + + request, metadata = self._interceptor.pre_delete_glossary_category(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryCategory", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._DeleteGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.DeleteGlossaryTermRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete glossary term method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryTermRequest): + The request object. Delete GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_http_options() + + request, metadata = self._interceptor.pre_delete_glossary_term(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryTerm", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._DeleteGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.GetGlossaryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.Glossary: + r"""Call the get glossary method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryRequest): + The request object. Get Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.business_glossary.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_http_options() + + request, metadata = self._interceptor.pre_get_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
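# [Editor's note, not generated code] The JSON body is parsed into the proto
# with ignore_unknown_fields=True, so fields the API adds later do not break
# older client versions. A minimal standalone sketch of that behavior
# ("someFutureField" is a made-up key):
#
#   from google.protobuf import json_format
#   pb = business_glossary.Glossary.pb(business_glossary.Glossary())
#   json_format.Parse('{"name": "g1", "someFutureField": 1}', pb,
#                     ignore_unknown_fields=True)  # unknown key is skipped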
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.Glossary() + pb_resp = business_glossary.Glossary.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.Glossary.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.GetGlossaryCategoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryCategory: + r"""Call the get glossary category method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryCategoryRequest): + The request object. Get GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. 
+ + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_http_options() + + request, metadata = self._interceptor.pre_get_glossary_category(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryCategory", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_category_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.GetGlossaryTermRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryTerm: + r"""Call the get glossary term method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryTermRequest): + The request object. Get GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_http_options() + + request, metadata = self._interceptor.pre_get_glossary_term(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryTerm", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
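# [Editor's note] The request/response logging above only fires when client
# debug logging is enabled. Per google-cloud-python conventions this is
# typically switched on via the GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment
# variable or standard logging configuration; a hedged sketch using only the
# standard library:
#
#   import logging
#   logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)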
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_term_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaries(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.ListGlossariesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.ListGlossariesResponse: + r"""Call the list glossaries method over HTTP. + + Args: + request (~.business_glossary.ListGlossariesRequest): + The request object. List Glossaries Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.business_glossary.ListGlossariesResponse: + List Glossaries Response + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_http_options() + + request, metadata = self._interceptor.pre_list_glossaries(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaries", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListGlossaries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossariesResponse() + pb_resp = business_glossary.ListGlossariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossaries_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.ListGlossariesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaryCategories(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaryCategories") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: 
business_glossary.ListGlossaryCategoriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.ListGlossaryCategoriesResponse: + r"""Call the list glossary categories method over HTTP. + + Args: + request (~.business_glossary.ListGlossaryCategoriesRequest): + The request object. List GlossaryCategories Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.ListGlossaryCategoriesResponse: + List GlossaryCategories Response + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_http_options() + + request, metadata = self._interceptor.pre_list_glossary_categories(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryCategories", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryCategories", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListGlossaryCategories._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
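# [Editor's note] ListGlossaryCategories returns a single page per call; the
# higher-level GAPIC client normally wraps this transport method in a pager
# that follows next_page_token automatically. Hedged manual-paging sketch,
# assuming the response exposes `categories` and `next_page_token` and using
# placeholder names:
#
#   while True:
#       resp = client.list_glossary_categories(request=req)
#       ...  # consume resp.categories
#       if not resp.next_page_token:
#           break
#       req.page_token = resp.next_page_token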
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossaryCategoriesResponse() + pb_resp = business_glossary.ListGlossaryCategoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossary_categories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossary_categories_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.ListGlossaryCategoriesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryCategories", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaryTerms(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaryTerms") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: business_glossary.ListGlossaryTermsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.ListGlossaryTermsResponse: + r"""Call the list glossary terms method over HTTP. + + Args: + request (~.business_glossary.ListGlossaryTermsRequest): + The request object. List GlossaryTerms Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.business_glossary.ListGlossaryTermsResponse: + List GlossaryTerms Response + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_http_options() + + request, metadata = self._interceptor.pre_list_glossary_terms(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryTerms", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryTerms", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListGlossaryTerms._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossaryTermsResponse() + pb_resp = business_glossary.ListGlossaryTermsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossary_terms(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossary_terms_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.ListGlossaryTermsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryTerms", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.UpdateGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response + + def __call__(self, + request: business_glossary.UpdateGlossaryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update glossary method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryRequest): + The request object. Update Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_http_options() + + request, metadata = self._interceptor.pre_update_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._UpdateGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
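+            # NOTE: UpdateGlossary is a long-running operation; the raw
+            # operations_pb2.Operation returned below is wrapped by the client
+            # layer in an operation future that callers poll for the final
+            # Glossary resource.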
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: business_glossary.UpdateGlossaryCategoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryCategory: + r"""Call the update glossary category method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryCategoryRequest): + The request object. Update GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. 
+ + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_http_options() + + request, metadata = self._interceptor.pre_update_glossary_category(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryCategory", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._UpdateGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_category_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: business_glossary.UpdateGlossaryTermRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> business_glossary.GlossaryTerm: + r"""Call the update glossary term method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryTermRequest): + The request object. Update GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_http_options() + + request, metadata = self._interceptor.pre_update_glossary_term(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryTerm", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._UpdateGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
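+            # NOTE: on success, the deserialized GlossaryTerm is routed through
+            # the interceptor's post_update_glossary_term hooks below, which let
+            # callers inspect or modify the response before it is returned.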
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_term_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_glossary(self) -> Callable[ + [business_glossary.CreateGlossaryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary_category(self) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary_term(self) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + business_glossary.GlossaryTerm]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary(self) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary_category(self) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary_term(self) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary(self) -> Callable[ + [business_glossary.GetGlossaryRequest], + business_glossary.Glossary]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary_category(self) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary_term(self) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + business_glossary.GlossaryTerm]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossaries(self) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossary_categories(self) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaryCategories(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossary_terms(self) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaryTerms(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary(self) -> Callable[ + [business_glossary.UpdateGlossaryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary_category(self) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary_term(self) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + business_glossary.GlossaryTerm]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseBusinessGlossaryServiceRestTransport._BaseGetLocation, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
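+            # NOTE: locations_pb2.Location is a plain protobuf message, so the
+            # body is parsed with json_format.Parse() directly rather than via a
+            # proto-plus .pb() wrapper as the Dataplex messages are.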
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseBusinessGlossaryServiceRestTransport._BaseListLocations, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method 
over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseBusinessGlossaryServiceRestTransport._BaseGetOperation, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
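+            # NOTE: this mixin exposes the google.longrunning GetOperation RPC
+            # so callers can inspect an operation (such as a glossary create or
+            # delete) by name without going through an operation future.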
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseBusinessGlossaryServiceRestTransport._BaseListOperations, BusinessGlossaryServiceRestStub): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'BusinessGlossaryServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py new file mode 100644 index 000000000000..8692eaba3d04 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py @@ -0,0 +1,883 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import business_glossary +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseBusinessGlossaryServiceRestTransport(BusinessGlossaryServiceTransport): + """Base REST backend transport for BusinessGlossaryService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "glossaryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/glossaries', + 'body': 'glossary', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.CreateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "categoryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/categories', + 'body': 'category', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.CreateGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "termId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/terms', + 'body': 'term', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.CreateGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/categories/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + 
transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/terms/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/categories/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/terms/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/glossaries', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossariesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaryCategories: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/categories', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossaryCategoriesRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaryTerms: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/terms', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossaryTermsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{glossary.name=projects/*/locations/*/glossaries/*}', + 'body': 'glossary', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + 
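The `__REQUIRED_FIELDS_DEFAULT_VALUES` map and `_get_unset_required_fields` helper that each of these _Base classes carries exist to back-fill query parameters the API marks as required (`glossaryId`, `categoryId`, `termId`, `updateMask`) whenever the caller leaves them unset, so the REST request always serializes them. A minimal, self-contained sketch of that mechanism, using illustrative names rather than the generated classes:

    from typing import Any, Dict

    REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {"updateMask": {}}

    def get_unset_required_fields(message_dict: Dict[str, Any]) -> Dict[str, Any]:
        # Keep only the required keys the caller did not populate.
        return {k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict}

    # "validateOnly" stands in for a hypothetical caller-supplied parameter.
    query_params = {"validateOnly": True}
    query_params.update(get_unset_required_fields(query_params))
    assert query_params == {"validateOnly": True, "updateMask": {}}

This mirrors the `query_params.update(..._get_unset_required_fields(query_params))` call that every `_get_query_params_json` above performs before pinning `$alt` to `json;enum-encoding=int`.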
@staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}', + 'body': 'category', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}', + 'body': 'term', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': 
'/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + 
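Note that the operations-related _Base classes each declare two URI bindings, one rooted at `projects/*` and one at `organizations/*`; `path_template.transcode` walks the list and uses the first binding whose pattern matches the request, so a single method serves both resource hierarchies. A simplified, hedged matcher illustrating that selection (the real implementation in `google.api_core.path_template` also handles bodies, query params, and `**` segments):

    import re

    def uri_matches(pattern: str, name: str) -> bool:
        # Pull 'projects/*/locations/*' out of
        # '/v1/{name=projects/*/locations/*}/operations';
        # each '*' matches exactly one path segment.
        inner = re.search(r"\{name=([^}]*)\}", pattern).group(1)
        regex = "^" + inner.replace("*", "[^/]+") + "$"
        return re.match(regex, name) is not None

    bindings = [
        "/v1/{name=projects/*/locations/*}/operations",
        "/v1/{name=organizations/*/locations/*}/operations",
    ]
    name = "organizations/123/locations/us-central1"
    assert [uri_matches(b, name) for b in bindings] == [False, True]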
@staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseBusinessGlossaryServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py new file mode 100644 index 000000000000..9143de05010c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CatalogServiceClient +from .async_client import CatalogServiceAsyncClient + +__all__ = ( + 'CatalogServiceClient', + 'CatalogServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py new file mode 100644 index 000000000000..dbd874963be6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -0,0 +1,4109 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
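Before the async client implementation begins below, note that the catalog_service `__init__.py` above re-exports both client flavors, so callers can import either one directly from the service subpackage. A brief usage sketch; credentials are resolved from the environment as usual, and nothing here is part of the generated sources:

    from google.cloud.dataplex_v1.services.catalog_service import (
        CatalogServiceAsyncClient,
        CatalogServiceClient,
    )

    def make_client(use_async: bool = False):
        # Both constructors accept the same credentials/transport/client_options
        # keywords; the async variant defaults to the grpc_asyncio transport.
        return CatalogServiceAsyncClient() if use_async else CatalogServiceClient()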
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport +from .client import CatalogServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class CatalogServiceAsyncClient: + """The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + """ + + _client: CatalogServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = CatalogServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CatalogServiceClient._DEFAULT_UNIVERSE + + aspect_type_path = staticmethod(CatalogServiceClient.aspect_type_path) + parse_aspect_type_path = staticmethod(CatalogServiceClient.parse_aspect_type_path) + entry_path = staticmethod(CatalogServiceClient.entry_path) + parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) + entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) + parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) + entry_link_path = staticmethod(CatalogServiceClient.entry_link_path) + parse_entry_link_path = staticmethod(CatalogServiceClient.parse_entry_link_path) + entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) + parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + glossary_path = staticmethod(CatalogServiceClient.glossary_path) + parse_glossary_path = staticmethod(CatalogServiceClient.parse_glossary_path) + metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) + parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) + common_billing_account_path = staticmethod(CatalogServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CatalogServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CatalogServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CatalogServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CatalogServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(CatalogServiceClient.parse_common_organization_path) + common_project_path = staticmethod(CatalogServiceClient.common_project_path) + parse_common_project_path = staticmethod(CatalogServiceClient.parse_common_project_path) + common_location_path = staticmethod(CatalogServiceClient.common_location_path) + parse_common_location_path = staticmethod(CatalogServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceAsyncClient: The constructed client. + """ + return CatalogServiceClient.from_service_account_info.__func__(CatalogServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceAsyncClient: The constructed client. 
+ """ + return CatalogServiceClient.from_service_account_file.__func__(CatalogServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CatalogServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CatalogServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CatalogServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = CatalogServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the catalog service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CatalogServiceTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CatalogServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.CatalogServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "credentialsType": None, + } + ) + + async def create_entry_type(self, + request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_type: Optional[catalog.EntryType] = None, + entry_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]): + The request object. Create EntryType Request. + parent (:class:`str`): + Required. The resource name of the EntryType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type_id (:class:`str`): + Required. EntryType identifier. + This corresponds to the ``entry_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_type, entry_type_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryTypeRequest): + request = catalog.CreateEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_type is not None: + request.entry_type = entry_type + if entry_type_id is not None: + request.entry_type_id = entry_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_entry_type(self, + request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, + *, + entry_type: Optional[catalog.EntryType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): + The request object. Update EntryType Request. + entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry_type, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryTypeRequest): + request = catalog.UpdateEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_type is not None: + request.entry_type = entry_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_type.name", request.entry_type.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_entry_type(self, + request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): + The request object. Delete EntryType Request. + name (:class:`str`): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryTypeRequest): + request = catalog.DeleteEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_entry_types(self, + request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntryTypesAsyncPager: + r"""Lists EntryType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entry_types(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]]): + The request object. List EntryTypes request + parent (:class:`str`): + Required. The resource name of the EntryType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: + List EntryTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryTypesRequest): + request = catalog.ListEntryTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryTypesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_entry_type(self, + request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryType: + r"""Gets an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): + The request object. Get EntryType request. + name (:class:`str`): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryType: + Entry Type is a template for creating + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryTypeRequest): + request = catalog.GetEntryTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_aspect_type(self, + request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + aspect_type: Optional[catalog.AspectType] = None, + aspect_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): + The request object. Create AspectType Request. + parent (:class:`str`): + Required. The resource name of the AspectType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): + Required. AspectType Resource. + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type_id (:class:`str`): + Required. AspectType identifier. + This corresponds to the ``aspect_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, aspect_type, aspect_type_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateAspectTypeRequest): + request = catalog.CreateAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if aspect_type is not None: + request.aspect_type = aspect_type + if aspect_type_id is not None: + request.aspect_type_id = aspect_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_aspect_type(self, + request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, + *, + aspect_type: Optional[catalog.AspectType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]]): + The request object. 
Update AspectType Request + aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): + Required. AspectType Resource + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [aspect_type, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateAspectTypeRequest): + request = catalog.UpdateAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if aspect_type is not None: + request.aspect_type = aspect_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("aspect_type.name", request.aspect_type.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_aspect_type(self, + request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an AspectType. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): + The request object. Delete AspectType Request. + name (:class:`str`): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteAspectTypeRequest): + request = catalog.DeleteAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. 
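+        # ``to_grpc_metadata`` renders the routing parameter into the
+        # ``x-goog-request-params`` header so the backend can route the call
+        # to the resource named in ``request.name``.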
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_aspect_types(self, + request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAspectTypesAsyncPager: + r"""Lists AspectType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_aspect_types(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAspectTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_aspect_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): + The request object. List AspectTypes request. + parent (:class:`str`): + Required. The resource name of the AspectType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: + List AspectTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
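+        # Only ``parent`` is accepted as a flattened argument here; paging
+        # fields such as ``page_size`` are set on the request object itself.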
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListAspectTypesRequest): + request = catalog.ListAspectTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_aspect_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAspectTypesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_aspect_type(self, + request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.AspectType: + r"""Gets an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_aspect_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): + The request object. Get AspectType request. + name (:class:`str`): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.AspectType: + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetAspectTypeRequest): + request = catalog.GetAspectTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry_group(self, + request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group: Optional[catalog.EntryGroup] = None, + entry_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]): + The request object. Create EntryGroup Request. + parent (:class:`str`): + Required. 
The resource name of the entryGroup, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (:class:`str`): + Required. EntryGroup identifier. + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_group, entry_group_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryGroupRequest): + request = catalog.CreateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group is not None: + request.entry_group = entry_group + if entry_group_id is not None: + request.entry_group_id = entry_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
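+        # ``response`` is now an ``AsyncOperation`` future; awaiting its
+        # ``result()`` yields the created ``EntryGroup`` once the long-running
+        # operation completes, as the sample above demonstrates.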
+ return response + + async def update_entry_group(self, + request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[catalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): + The request object. Update EntryGroup Request. + entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry_group, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryGroupRequest): + request = catalog.UpdateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
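+        # Illustrative mask (an assumption, not from the sample): to update
+        # only the description, pass
+        #     field_mask_pb2.FieldMask(paths=["description"])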
+ if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_entry_group(self, + request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): + The request object. Delete EntryGroup Request. + name (:class:`str`): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryGroupRequest): + request = catalog.DeleteEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_entry_groups(self, + request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntryGroupsAsyncPager: + r"""Lists EntryGroup resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entry_groups(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]]): + The request object. List entryGroups request. + parent (:class:`str`): + Required. The resource name of the entryGroup location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: + List entry groups response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryGroupsRequest): + request = catalog.ListEntryGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_group(self, + request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]]): + The request object. Get EntryGroup request. + name (:class:`str`): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryGroup: + An Entry Group represents a logical + grouping of one or more Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryGroupRequest): + request = catalog.GetEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry(self, + request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry: Optional[catalog.Entry] = None, + entry_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Creates an Entry. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): + The request object. Create Entry request. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (:class:`google.cloud.dataplex_v1.types.Entry`): + Required. Entry resource. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_id (:class:`str`): + Required. Entry identifier. It has to be unique within + an Entry Group. + + Entries corresponding to Google Cloud resources use an + Entry ID format based on `full resource + names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__. + The format is a full resource name of the resource + without the prefix double slashes in the API service + name part of the full resource name. This allows + retrieval of entries using their associated resource + name. + + For example, if the full resource name of a resource is + ``//library.googleapis.com/shelves/shelf1/books/book2``, + then the suggested entry_id is + ``library.googleapis.com/shelves/shelf1/books/book2``. + + It is also suggested to follow the same convention for + entries corresponding to resources from providers or + systems other than Google Cloud. + + The maximum size of the field is 4000 characters. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
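+        # ``parent``, ``entry``, and ``entry_id`` mirror the corresponding
+        # ``CreateEntryRequest`` fields; supply them either here or on the
+        # request object, never both.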
+ flattened_params = [parent, entry, entry_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryRequest): + request = catalog.CreateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry is not None: + request.entry = entry + if entry_id is not None: + request.entry_id = entry_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entry(self, + request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[catalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Updates an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): + The request object. Update Entry request. + entry (:class:`google.cloud.dataplex_v1.types.Entry`): + Required. Entry resource. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Mask of fields to update. To update Aspects, + the update_mask must contain the value "aspects". + + If the update_mask is empty, the service will update all + modifiable fields present in the request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryRequest): + request = catalog.UpdateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry(self, + request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Deletes an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): + The request object. Delete Entry request. + name (:class:`str`): + Required. 
The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryRequest): + request = catalog.DeleteEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_entries(self, + request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntriesAsyncPager: + r"""Lists Entries within an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entries(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): + The request object. List Entries request. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: + List Entries response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntriesRequest): + request = catalog.ListEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
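+        # The pager transparently issues follow-up ``ListEntries`` calls as
+        # the caller iterates with ``async for``, reusing the retry, timeout,
+        # and metadata supplied above.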
+ return response + + async def get_entry(self, + request: Optional[Union[catalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Gets an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]): + The request object. Get Entry request. + name (:class:`str`): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryRequest): + request = catalog.GetEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def lookup_entry(self, + request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Looks up an entry by name using the permission on the + source system. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]): + The request object. Lookup Entry request using + permissions in the source system. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.LookupEntryRequest): + request = catalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.lookup_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_entries(self, + request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, + *, + name: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchEntriesAsyncPager: + r"""Searches for Entries matching the given query and + scope. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_search_entries(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.SearchEntriesRequest( + name="name_value", + query="query_value", + ) + + # Make the request + page_result = await client.search_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]]): + The request object. + name (:class:`str`): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/global``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. The query against which entries in scope + should be matched. The query syntax is defined in + `Search syntax for Dataplex Universal + Catalog <https://cloud.google.com/dataplex/docs/search-syntax>`__. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, query] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.SearchEntriesRequest): + request = catalog.SearchEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.search_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain.
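+ # The check compares the universe domain the client was configured with + # (by default "googleapis.com") against the universe domain of the supplied + # credentials, and raises before any network call if the two differ.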
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_metadata_job(self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a metadata job. For example, use a metadata + job to import metadata from a third-party system into + Dataplex Universal Catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" + metadata_job.type_ = "EXPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = await client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): + The request object. Create metadata job request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (:class:`str`): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, metadata_job, metadata_job_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_metadata_job(self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): + The request object. Get metadata job request. + name (:class:`str`): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_metadata_jobs(self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMetadataJobsAsyncPager: + r"""Lists metadata jobs. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_metadata_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): + The request object. List metadata jobs request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
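+ # As a usage sketch (the parent value below is a placeholder), the pager + # hides page tokens entirely: + # + # pager = await client.list_metadata_jobs(parent="projects/my-project/locations/us-central1") + # async for metadata_job in pager: + # print(metadata_job.name)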
+ response = pagers.ListMetadataJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_metadata_job(self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): + The request object. Cancel metadata job request. + name (:class:`str`): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
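+ # The wrapped method carries the default retry and timeout policies from the + # service configuration; explicit ``retry`` and ``timeout`` arguments passed + # to this call override those defaults for a single request.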
+ rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_entry_link(self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = await client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]]): + The request object. Request message for CreateEntryLink. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (:class:`google.cloud.dataplex_v1.types.EntryLink`): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (:class:`str`): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry_link(self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]]): + The request object. Request message for DeleteEntryLink. + name (:class:`str`): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_link(self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]]): + The request object. Request message for GetEntryLink. + name (:class:`str`): + Required. 
The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CatalogServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "CatalogServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py new file mode 100644 index 000000000000..d96514c69ff1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -0,0 +1,4507 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from 
.transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO + from .transports.grpc import CatalogServiceGrpcTransport + from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport + from .transports.rest import CatalogServiceRestTransport + + + class CatalogServiceClientMeta(type): + """Metaclass for the CatalogService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] + _transport_registry["grpc"] = CatalogServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CatalogServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CatalogServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + + class CatalogServiceClient(metaclass=CatalogServiceClientMeta): + """The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceClient: The constructed client.
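+ + Example (a minimal sketch; the key-file name is an assumption): + + .. code-block:: python + + import json + + with open("service-account.json") as f: + info = json.load(f) + client = CatalogServiceClient.from_service_account_info(info)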
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CatalogServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CatalogServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CatalogServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def aspect_type_path(project: str,location: str,aspect_type: str,) -> str: + """Returns a fully-qualified aspect_type string.""" + return "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, ) + + @staticmethod + def parse_aspect_type_path(path: str) -> Dict[str,str]: + """Parses a aspect_type path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/aspectTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: + """Returns a fully-qualified entry string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) + + @staticmethod + def parse_entry_path(path: str) -> Dict[str,str]: + """Parses a entry path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_group_path(project: str,location: str,entry_group: str,) -> str: + """Returns a fully-qualified entry_group string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + + @staticmethod + def parse_entry_group_path(path: str) -> Dict[str,str]: + """Parses a entry_group path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_link_path(project: str,location: str,entry_group: str,entry_link: str,) -> str: + """Returns a fully-qualified entry_link string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format(project=project, location=location, entry_group=entry_group, entry_link=entry_link, ) + + @staticmethod + def parse_entry_link_path(path: str) -> Dict[str,str]: + """Parses a entry_link path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entryLinks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_type_path(project: str,location: str,entry_type: str,) -> str: + """Returns a fully-qualified entry_type string.""" + return 
"projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) + + @staticmethod + def parse_entry_type_path(path: str) -> Dict[str,str]: + """Parses a entry_type path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryTypes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def glossary_path(project: str,location: str,glossary: str,) -> str: + """Returns a fully-qualified glossary string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) + + @staticmethod + def parse_glossary_path(path: str) -> Dict[str,str]: + """Parses a glossary path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def metadata_job_path(project: str,location: str,metadataJob: str,) -> str: + """Returns a fully-qualified metadata_job string.""" + return "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) + + @staticmethod + def parse_metadata_job_path(path: str) -> Dict[str,str]: + """Parses a metadata_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/metadataJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CatalogServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = CatalogServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the catalog service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CatalogServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which has one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CatalogServiceClient._read_environment_variables() + self._client_cert_source = CatalogServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = CatalogServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, CatalogServiceTransport) + if transport_provided: + # transport is a CatalogServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly."
+ ) + self._transport = cast(CatalogServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CatalogServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport]] = ( + CatalogServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CatalogServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.CatalogServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "credentialsType": None, + } + ) + + def create_entry_type(self, + request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_type: Optional[catalog.EntryType] = None, + entry_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): + The request object. 
Create EntryType Request. + parent (str): + Required. The resource name of the EntryType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_type_id (str): + Required. EntryType identifier. + This corresponds to the ``entry_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_type, entry_type_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryTypeRequest): + request = catalog.CreateEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_type is not None: + request.entry_type = entry_type + if entry_type_id is not None: + request.entry_type_id = entry_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
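+ # The future resolves to the created EntryType; a usage sketch for the
+ # caller (the timeout value is an assumption, not a library default):
+ #
+ #     created = client.create_entry_type(request=request).result(timeout=300)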
+ return response + + def update_entry_type(self, + request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, + *, + entry_type: Optional[catalog.EntryType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): + The request object. Update EntryType Request. + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + This corresponds to the ``entry_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryType` Entry + Type is a template for creating Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry_type, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryTypeRequest): + request = catalog.UpdateEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if entry_type is not None: + request.entry_type = entry_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_type.name", request.entry_type.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_entry_type(self, + request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): + The request object. Delete EntryType Request. + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryTypeRequest): + request = catalog.DeleteEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entry_types(self, + request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntryTypesPager: + r"""Lists EntryType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entry_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]): + The request object. List EntryTypes request + parent (str): + Required. The resource name of the EntryType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: + List EntryTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryTypesRequest): + request = catalog.ListEntryTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entry_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryTypesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_type(self, + request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryType: + r"""Gets an EntryType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): + The request object. Get EntryType request. + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryType: + Entry Type is a template for creating + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryTypeRequest): + request = catalog.GetEntryTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_aspect_type(self, + request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + aspect_type: Optional[catalog.AspectType] = None, + aspect_type_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): + The request object. Create AspectType Request. + parent (str): + Required. The resource name of the AspectType, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource. + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + aspect_type_id (str): + Required. AspectType identifier. + This corresponds to the ``aspect_type_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, aspect_type, aspect_type_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateAspectTypeRequest): + request = catalog.CreateAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if aspect_type is not None: + request.aspect_type = aspect_type + if aspect_type_id is not None: + request.aspect_type_id = aspect_type_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_aspect_type(self, + request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, + *, + aspect_type: Optional[catalog.AspectType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]): + The request object. Update AspectType Request + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource + This corresponds to the ``aspect_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
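+
+ A ``FieldMask`` restricting the update to particular fields can be built directly; a minimal sketch (the ``"description"`` path is an illustrative assumption):
+
+ .. code-block:: python
+
+ from google.protobuf import field_mask_pb2
+
+ update_mask = field_mask_pb2.FieldMask(paths=["description"])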
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [aspect_type, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateAspectTypeRequest): + request = catalog.UpdateAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if aspect_type is not None: + request.aspect_type = aspect_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("aspect_type.name", request.aspect_type.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.AspectType, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_aspect_type(self, + request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): + The request object. Delete AspectType Request. 
+ name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteAspectTypeRequest): + request = catalog.DeleteAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_aspect_types(self, + request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAspectTypesPager: + r"""Lists AspectType resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_aspect_types(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAspectTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_aspect_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): + The request object. List AspectTypes request. + parent (str): + Required. The resource name of the AspectType location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: + List AspectTypes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListAspectTypesRequest): + request = catalog.ListAspectTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_aspect_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAspectTypesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_aspect_type(self, + request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.AspectType: + r"""Gets an AspectType. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_aspect_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): + The request object. Get AspectType request. + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.AspectType: + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetAspectTypeRequest): + request = catalog.GetAspectTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_aspect_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
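+ # Per gapic_v1.method semantics, explicit retry/timeout/metadata values
+ # passed by the caller here take precedence over the defaults that were
+ # baked into the wrapped method.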
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entry_group(self, + request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group: Optional[catalog.EntryGroup] = None, + entry_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]): + The request object. Create EntryGroup Request. + parent (str): + Required. The resource name of the entryGroup, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (str): + Required. EntryGroup identifier. + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
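+ # The flattened arguments are mutually exclusive with `request`; for
+ # example (sketch, values assumed), either of these calls is valid:
+ #   client.create_entry_group(request={"parent": "projects/p/locations/l", ...})
+ #   client.create_entry_group(parent="projects/p/locations/l", entry_group=eg, entry_group_id="eg-id")
+ # but combining `request` with any flattened field raises ValueError below.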
+ flattened_params = [parent, entry_group, entry_group_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryGroupRequest): + request = catalog.CreateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group is not None: + request.entry_group = entry_group + if entry_group_id is not None: + request.entry_group_id = entry_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_entry_group(self, + request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[catalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): + The request object. Update EntryGroup Request. + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.EntryGroup` An + Entry Group represents a logical grouping of one or more + Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry_group, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryGroupRequest): + request = catalog.UpdateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.EntryGroup, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_entry_group(self, + request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): + The request object. Delete EntryGroup Request. + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryGroupRequest): + request = catalog.DeleteEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
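+ # The returned future resolves to empty_pb2.Empty on success, and
+ # service.OperationMetadata carries progress information for the deletion.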
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entry_groups(self, + request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntryGroupsPager: + r"""Lists EntryGroup resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entry_groups(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]): + The request object. List entryGroups request. + parent (str): + Required. The resource name of the entryGroup location, + of the form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: + List entry groups response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntryGroupsRequest): + request = catalog.ListEntryGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
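+ # The wrapped method already carries the default retry policy and
+ # timeout configured on the transport for list_entry_groups.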
+ rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_group(self, + request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]): + The request object. Get EntryGroup request. + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryGroup: + An Entry Group represents a logical + grouping of one or more Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
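+ # A plain dict is accepted and coerced into a
+ # catalog.GetEntryGroupRequest by the constructor below.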
+ if not isinstance(request, catalog.GetEntryGroupRequest):
+ request = catalog.GetEntryGroupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_entry_group]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_entry(self,
+ request: Optional[Union[catalog.CreateEntryRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ entry: Optional[catalog.Entry] = None,
+ entry_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> catalog.Entry:
+ r"""Creates an Entry.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ def sample_create_entry():
+ # Create a client
+ client = dataplex_v1.CatalogServiceClient()
+
+ # Initialize request argument(s)
+ entry = dataplex_v1.Entry()
+ entry.entry_type = "entry_type_value"
+
+ request = dataplex_v1.CreateEntryRequest(
+ parent="parent_value",
+ entry_id="entry_id_value",
+ entry=entry,
+ )
+
+ # Make the request
+ response = client.create_entry(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]):
+ The request object. Create Entry request.
+ parent (str):
+ Required. The resource name of the parent Entry Group:
+ ``projects/{project}/locations/{location}/entryGroups/{entry_group}``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ entry (google.cloud.dataplex_v1.types.Entry):
+ Required. Entry resource.
+ This corresponds to the ``entry`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ entry_id (str):
+ Required. Entry identifier. It has to be unique within
+ an Entry Group.
+
+ Entries corresponding to Google Cloud resources use an
+ Entry ID format based on `full resource
+ names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__.
+ The format is a full resource name of the resource
+ without the prefix double slashes in the API service
+ name part of the full resource name. This allows
+ retrieval of entries using their associated resource
+ name.
+
+ For example, if the full resource name of a resource is
+ ``//library.googleapis.com/shelves/shelf1/books/book2``,
+ then the suggested entry_id is
+ ``library.googleapis.com/shelves/shelf1/books/book2``.
+ + It is also suggested to follow the same convention for + entries corresponding to resources from providers or + systems other than Google Cloud. + + The maximum size of the field is 4000 characters. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry, entry_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryRequest): + request = catalog.CreateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry is not None: + request.entry = entry + if entry_id is not None: + request.entry_id = entry_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entry(self, + request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[catalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Updates an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]): + The request object. Update Entry request. + entry (google.cloud.dataplex_v1.types.Entry): + Required. Entry resource. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. To update Aspects, + the update_mask must contain the value "aspects". + + If the update_mask is empty, the service will update all + modifiable fields present in the request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [entry, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.UpdateEntryRequest): + request = catalog.UpdateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_entry(self, + request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Deletes an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]): + The request object. Delete Entry request. + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryRequest): + request = catalog.DeleteEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
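+ # Unlike delete_aspect_type or delete_entry_group, this call is not a
+ # long-running operation; the deleted Entry is returned directly.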
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entries(self, + request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntriesPager: + r"""Lists Entries within an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): + The request object. List Entries request. + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: + List Entries response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListEntriesRequest): + request = catalog.ListEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entries] + + # Certain fields should be provided within the metadata header; + # add these here. 
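+ # The routing header is transmitted as x-goog-request-params so the
+ # backend can route the call based on the parent resource.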
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry(self, + request: Optional[Union[catalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Gets an Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): + The request object. Get Entry request. + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryRequest): + request = catalog.GetEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
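+ # Only fields that were passed explicitly are copied onto the request;
+ # a value of None leaves the corresponding request field untouched.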
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lookup_entry(self, + request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.Entry: + r"""Looks up an entry by name using the permission on the + source system. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_lookup_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.LookupEntryRequest( + name="name_value", + entry="entry_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): + The request object. Lookup Entry request using + permissions in the source system. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.LookupEntryRequest): + request = catalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ def search_entries(self,
+ request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ query: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.SearchEntriesPager:
+ r"""Searches for Entries matching the given query and
+ scope.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataplex_v1
+
+ def sample_search_entries():
+ # Create a client
+ client = dataplex_v1.CatalogServiceClient()
+
+ # Initialize request argument(s)
+ request = dataplex_v1.SearchEntriesRequest(
+ name="name_value",
+ query="query_value",
+ )
+
+ # Make the request
+ page_result = client.search_entries(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]):
+ The request object.
+ name (str):
+ Required. The project to which the request should be
+ attributed in the following form:
+ ``projects/{project}/locations/global``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ query (str):
+ Required. The query against which entries in scope
+ should be matched. The query syntax is defined in
+ `Search syntax for Dataplex Universal
+ Catalog <https://cloud.google.com/dataplex/docs/search-syntax>`__.
+
+ This corresponds to the ``query`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager:
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name, query]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, catalog.SearchEntriesRequest):
+ request = catalog.SearchEntriesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None: + request.name = name + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_metadata_job(self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a metadata job. For example, use a metadata + job to import metadata from a third-party system into + Dataplex Universal Catalog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" + metadata_job.type_ = "EXPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): + The request object. Create metadata job request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (str): + Optional. The metadata job ID. 
If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, metadata_job, metadata_job_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_metadata_job(self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): + The request object. Get metadata job request. + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_metadata_jobs(self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMetadataJobsPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): + The request object. List metadata jobs request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetadataJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def cancel_metadata_job(self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): + The request object. Cancel metadata job request. + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
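+ # (This check confirms that the client's configured universe domain,
+ # e.g. googleapis.com, matches the universe domain of the supplied
+ # credentials before any request is sent.)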
+ self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_entry_link(self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]): + The request object. Request message for CreateEntryLink. + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry_link(self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]): + The request object. Request message for DeleteEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_link(self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]): + The request object. Request message for GetEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CatalogServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
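+ # (list_operations and the operations/location helpers that follow are
+ # standard mixin RPCs from google.longrunning and google.cloud.location,
+ # shared by generated GAPIC clients rather than being Dataplex-specific.)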
+ rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "CatalogServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py new file mode 100644 index 000000000000..026fca877ecd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py @@ -0,0 +1,861 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import catalog + + +class ListEntryTypesPager: + """A pager for iterating through ``list_entry_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryTypes`` requests and continue to iterate + through the ``entry_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListEntryTypesResponse], + request: catalog.ListEntryTypesRequest, + response: catalog.ListEntryTypesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): + The initial response object. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = catalog.ListEntryTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntryTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.EntryType]: + for page in self.pages: + yield from page.entry_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryTypesAsyncPager: + """A pager for iterating through ``list_entry_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryTypes`` requests and continue to iterate + through the ``entry_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntryTypesResponse]], + request: catalog.ListEntryTypesRequest, + response: catalog.ListEntryTypesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
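+
+ Example (an illustrative sketch; callers normally obtain this pager
+ from ``CatalogServiceAsyncClient.list_entry_types`` rather than
+ constructing it directly, and the resource name below is hypothetical):
+
+ .. code-block:: python
+
+ pager = await client.list_entry_types(
+ parent="projects/my-project/locations/us-central1")
+ async for entry_type in pager:
+ print(entry_type.name)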
+ """ + self._method = method + self._request = catalog.ListEntryTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntryTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.EntryType]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAspectTypesPager: + """A pager for iterating through ``list_aspect_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``aspect_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAspectTypes`` requests and continue to iterate + through the ``aspect_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListAspectTypesResponse], + request: catalog.ListAspectTypesRequest, + response: catalog.ListAspectTypesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListAspectTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListAspectTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.AspectType]: + for page in self.pages: + yield from page.aspect_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAspectTypesAsyncPager: + """A pager for iterating through ``list_aspect_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``aspect_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAspectTypes`` requests and continue to iterate + through the ``aspect_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListAspectTypesResponse]], + request: catalog.ListAspectTypesRequest, + response: catalog.ListAspectTypesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListAspectTypesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListAspectTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.AspectType]: + async def async_generator(): + async for page in self.pages: + for response in page.aspect_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListEntryGroupsResponse], + request: catalog.ListEntryGroupsRequest, + response: catalog.ListEntryGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListEntryGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.EntryGroup]: + for page in self.pages: + yield from page.entry_groups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsAsyncPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntryGroupsResponse]], + request: catalog.ListEntryGroupsRequest, + response: catalog.ListEntryGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListEntryGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.EntryGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListEntriesResponse], + request: catalog.ListEntriesRequest, + response: catalog.ListEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.Entry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesAsyncPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListEntriesResponse]], + request: catalog.ListEntriesRequest, + response: catalog.ListEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.Entry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchEntriesPager: + """A pager for iterating through ``search_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchEntries`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.SearchEntriesResponse], + request: catalog.SearchEntriesRequest, + response: catalog.SearchEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.SearchEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.SearchEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.SearchEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.SearchEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.SearchEntriesResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchEntriesAsyncPager: + """A pager for iterating through ``search_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchEntries`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.SearchEntriesResponse]], + request: catalog.SearchEntriesRequest, + response: catalog.SearchEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.SearchEntriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.SearchEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.SearchEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.SearchEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.SearchEntriesResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetadataJobsPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., catalog.ListMetadataJobsResponse], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[catalog.MetadataJob]: + for page in self.pages: + yield from page.metadata_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMetadataJobsAsyncPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: + async def async_generator(): + async for page in self.pages: + for response in page.metadata_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst new file mode 100644 index 000000000000..c14dcbeef235 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CatalogServiceTransport` is the ABC for all transports. +- public child `CatalogServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CatalogServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCatalogServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `CatalogServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py new file mode 100644 index 000000000000..8ee5c702fdcc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CatalogServiceTransport +from .grpc import CatalogServiceGrpcTransport +from .grpc_asyncio import CatalogServiceGrpcAsyncIOTransport +from .rest import CatalogServiceRestTransport +from .rest import CatalogServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] +_transport_registry['grpc'] = CatalogServiceGrpcTransport +_transport_registry['grpc_asyncio'] = CatalogServiceGrpcAsyncIOTransport +_transport_registry['rest'] = CatalogServiceRestTransport + +__all__ = ( + 'CatalogServiceTransport', + 'CatalogServiceGrpcTransport', + 'CatalogServiceGrpcAsyncIOTransport', + 'CatalogServiceRestTransport', + 'CatalogServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py new file mode 100644 index 000000000000..501523591741 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -0,0 +1,754 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import catalog +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class CatalogServiceTransport(abc.ABC): + """Abstract transport class for CatalogService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Apply the GDCH audience only to application default credentials;
+            # a credentials file supplied by the user is left untouched.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
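+        # Each entry pairs a transport method with a ``gapic_v1.method.wrap_method``
+        # wrapper; the wrapper applies the ``default_retry``/``default_timeout``
+        # given here whenever the caller leaves ``retry``/``timeout`` at
+        # ``gapic_v1.method.DEFAULT``.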
+ self._wrapped_methods = { + self.create_entry_type: gapic_v1.method.wrap_method( + self.create_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_type: gapic_v1.method.wrap_method( + self.update_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_type: gapic_v1.method.wrap_method( + self.delete_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_types: gapic_v1.method.wrap_method( + self.list_entry_types, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_type: gapic_v1.method.wrap_method( + self.get_entry_type, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_aspect_type: gapic_v1.method.wrap_method( + self.create_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_aspect_type: gapic_v1.method.wrap_method( + self.update_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_aspect_type: gapic_v1.method.wrap_method( + self.delete_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_aspect_types: gapic_v1.method.wrap_method( + self.list_aspect_types, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aspect_type: gapic_v1.method.wrap_method( + self.get_aspect_type, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry_group: gapic_v1.method.wrap_method( + self.create_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_group: gapic_v1.method.wrap_method( + self.update_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_group: gapic_v1.method.wrap_method( + self.delete_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_groups: gapic_v1.method.wrap_method( + self.list_entry_groups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_group: gapic_v1.method.wrap_method( + self.get_entry_group, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry: gapic_v1.method.wrap_method( + self.create_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry: 
gapic_v1.method.wrap_method( + self.update_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry: gapic_v1.method.wrap_method( + self.delete_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entries: gapic_v1.method.wrap_method( + self.list_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_entry: gapic_v1.method.wrap_method( + self.get_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lookup_entry: gapic_v1.method.wrap_method( + self.lookup_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.search_entries: gapic_v1.method.wrap_method( + self.search_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_metadata_job: gapic_v1.method.wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: gapic_v1.method.wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: gapic_v1.method.wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: gapic_v1.method.wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.create_entry_link: gapic_v1.method.wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: gapic_v1.method.wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: gapic_v1.method.wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + 
default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + Union[ + catalog.ListEntryTypesResponse, + Awaitable[catalog.ListEntryTypesResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + Union[ + catalog.EntryType, + Awaitable[catalog.EntryType] + ]]: + raise NotImplementedError() + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + Union[ + catalog.ListAspectTypesResponse, + Awaitable[catalog.ListAspectTypesResponse] + ]]: + raise NotImplementedError() + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + Union[ + catalog.AspectType, + Awaitable[catalog.AspectType] + ]]: + raise NotImplementedError() + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + Union[ + catalog.ListEntryGroupsResponse, + Awaitable[catalog.ListEntryGroupsResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + Union[ + catalog.EntryGroup, + Awaitable[catalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def create_entry(self) -> 
Callable[ + [catalog.CreateEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + Union[ + catalog.ListEntriesResponse, + Awaitable[catalog.ListEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + Union[ + catalog.Entry, + Awaitable[catalog.Entry] + ]]: + raise NotImplementedError() + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + Union[ + catalog.SearchEntriesResponse, + Awaitable[catalog.SearchEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + Union[ + catalog.MetadataJob, + Awaitable[catalog.MetadataJob] + ]]: + raise NotImplementedError() + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + Union[ + catalog.ListMetadataJobsResponse, + Awaitable[catalog.ListMetadataJobsResponse] + ]]: + raise NotImplementedError() + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_entry_link(self) -> Callable[ + [catalog.CreateEntryLinkRequest], + Union[ + catalog.EntryLink, + Awaitable[catalog.EntryLink] + ]]: + raise NotImplementedError() + + @property + def delete_entry_link(self) -> Callable[ + [catalog.DeleteEntryLinkRequest], + Union[ + catalog.EntryLink, + Awaitable[catalog.EntryLink] + ]]: + raise NotImplementedError() + + @property + def get_entry_link(self) -> Callable[ + [catalog.GetEntryLinkRequest], + Union[ + catalog.EntryLink, + Awaitable[catalog.EntryLink] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + 
@property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CatalogServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py new file mode 100644 index 000000000000..108a93fed235 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -0,0 +1,1220 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import catalog +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for 
{client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.CatalogService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC response metadata to a dict of strings for logging.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.CatalogService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class CatalogServiceGrpcTransport(CatalogServiceTransport):
+    """gRPC backend transport for CatalogService.
+
+    The primary resources offered by this service are
+    EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks.
+    They collectively let data administrators organize, manage,
+    secure, and catalog data located across cloud projects in their
+    organization in a variety of storage systems, including Cloud
+    Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
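+            # (An explicit client_cert_source callback takes precedence;
+            # otherwise the application default SSL credentials are used
+            # for the mTLS endpoint.)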
+            if client_cert_source:
+                cert, key = client_cert_source()
+                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+        else:
+            if client_cert_source_for_mtls and not ssl_channel_credentials:
+                cert, key = client_cert_source_for_mtls()
+                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # Initialize with the provided callable or the default channel factory.
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # Use the credentials which were saved above.
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+
+        # Wrap messages. This must be done after self._logged_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create entry type method over gRPC. + + Creates an EntryType. + + Returns: + Callable[[~.CreateEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_type' not in self._stubs: + self._stubs['create_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', + request_serializer=catalog.CreateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_type'] + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update entry type method over gRPC. + + Updates an EntryType. + + Returns: + Callable[[~.UpdateEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_type' not in self._stubs: + self._stubs['update_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', + request_serializer=catalog.UpdateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_type'] + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete entry type method over gRPC. + + Deletes an EntryType. + + Returns: + Callable[[~.DeleteEntryTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_type' not in self._stubs: + self._stubs['delete_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', + request_serializer=catalog.DeleteEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_type'] + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + catalog.ListEntryTypesResponse]: + r"""Return a callable for the list entry types method over gRPC. + + Lists EntryType resources in a project and location. 
+ + Returns: + Callable[[~.ListEntryTypesRequest], + ~.ListEntryTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_types' not in self._stubs: + self._stubs['list_entry_types'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', + request_serializer=catalog.ListEntryTypesRequest.serialize, + response_deserializer=catalog.ListEntryTypesResponse.deserialize, + ) + return self._stubs['list_entry_types'] + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + catalog.EntryType]: + r"""Return a callable for the get entry type method over gRPC. + + Gets an EntryType. + + Returns: + Callable[[~.GetEntryTypeRequest], + ~.EntryType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_type' not in self._stubs: + self._stubs['get_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryType', + request_serializer=catalog.GetEntryTypeRequest.serialize, + response_deserializer=catalog.EntryType.deserialize, + ) + return self._stubs['get_entry_type'] + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create aspect type method over gRPC. + + Creates an AspectType. + + Returns: + Callable[[~.CreateAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_aspect_type' not in self._stubs: + self._stubs['create_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', + request_serializer=catalog.CreateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_aspect_type'] + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update aspect type method over gRPC. + + Updates an AspectType. + + Returns: + Callable[[~.UpdateAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
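+        # The stub is cached in self._stubs, so repeated property access
+        # reuses a single serialized-call object per RPC instead of
+        # re-registering it on the channel.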
+ if 'update_aspect_type' not in self._stubs: + self._stubs['update_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', + request_serializer=catalog.UpdateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_aspect_type'] + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete aspect type method over gRPC. + + Deletes an AspectType. + + Returns: + Callable[[~.DeleteAspectTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_aspect_type' not in self._stubs: + self._stubs['delete_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', + request_serializer=catalog.DeleteAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_aspect_type'] + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + catalog.ListAspectTypesResponse]: + r"""Return a callable for the list aspect types method over gRPC. + + Lists AspectType resources in a project and location. + + Returns: + Callable[[~.ListAspectTypesRequest], + ~.ListAspectTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_aspect_types' not in self._stubs: + self._stubs['list_aspect_types'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', + request_serializer=catalog.ListAspectTypesRequest.serialize, + response_deserializer=catalog.ListAspectTypesResponse.deserialize, + ) + return self._stubs['list_aspect_types'] + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + catalog.AspectType]: + r"""Return a callable for the get aspect type method over gRPC. + + Gets an AspectType. + + Returns: + Callable[[~.GetAspectTypeRequest], + ~.AspectType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_aspect_type' not in self._stubs: + self._stubs['get_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetAspectType', + request_serializer=catalog.GetAspectTypeRequest.serialize, + response_deserializer=catalog.AspectType.deserialize, + ) + return self._stubs['get_aspect_type'] + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an EntryGroup. + + Returns: + Callable[[~.CreateEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', + request_serializer=catalog.CreateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', + request_serializer=catalog.UpdateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. + + Returns: + Callable[[~.DeleteEntryGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', + request_serializer=catalog.DeleteEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + catalog.ListEntryGroupsResponse]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists EntryGroup resources in a project and location. + + Returns: + Callable[[~.ListEntryGroupsRequest], + ~.ListEntryGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', + request_serializer=catalog.ListEntryGroupsRequest.serialize, + response_deserializer=catalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + catalog.EntryGroup]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. 
+ + Returns: + Callable[[~.GetEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', + request_serializer=catalog.GetEntryGroupRequest.serialize, + response_deserializer=catalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + catalog.Entry]: + r"""Return a callable for the create entry method over gRPC. + + Creates an Entry. + + Returns: + Callable[[~.CreateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntry', + request_serializer=catalog.CreateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + catalog.Entry]: + r"""Return a callable for the update entry method over gRPC. + + Updates an Entry. + + Returns: + Callable[[~.UpdateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', + request_serializer=catalog.UpdateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + catalog.Entry]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an Entry. + + Returns: + Callable[[~.DeleteEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', + request_serializer=catalog.DeleteEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['delete_entry'] + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + catalog.ListEntriesResponse]: + r"""Return a callable for the list entries method over gRPC. + + Lists Entries within an EntryGroup. 
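+
+        A minimal usage sketch (hypothetical caller code; assumes a
+        constructed transport and a valid EntryGroup resource name):
+
+            request = catalog.ListEntriesRequest(
+                parent="projects/my-project/locations/us-central1/entryGroups/my-group",
+            )
+            response = transport.list_entries(request)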
+ + Returns: + Callable[[~.ListEntriesRequest], + ~.ListEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntries', + request_serializer=catalog.ListEntriesRequest.serialize, + response_deserializer=catalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + catalog.Entry]: + r"""Return a callable for the get entry method over gRPC. + + Gets an Entry. + + Returns: + Callable[[~.GetEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntry', + request_serializer=catalog.GetEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + catalog.Entry]: + r"""Return a callable for the lookup entry method over gRPC. + + Looks up an entry by name using the permission on the + source system. + + Returns: + Callable[[~.LookupEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/LookupEntry', + request_serializer=catalog.LookupEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + catalog.SearchEntriesResponse]: + r"""Return a callable for the search entries method over gRPC. + + Searches for Entries matching the given query and + scope. + + Returns: + Callable[[~.SearchEntriesRequest], + ~.SearchEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_entries' not in self._stubs: + self._stubs['search_entries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/SearchEntries', + request_serializer=catalog.SearchEntriesRequest.serialize, + response_deserializer=catalog.SearchEntriesResponse.deserialize, + ) + return self._stubs['search_entries'] + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + operations_pb2.Operation]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. 
For example, use a metadata + job to import metadata from a third-party system into + Dataplex Universal Catalog. + + Returns: + Callable[[~.CreateMetadataJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metadata_job' not in self._stubs: + self._stubs['create_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_metadata_job'] + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + catalog.MetadataJob]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + ~.MetadataJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_metadata_job' not in self._stubs: + self._stubs['get_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs['get_metadata_job'] + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + catalog.ListMetadataJobsResponse]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + ~.ListMetadataJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_metadata_jobs' not in self._stubs: + self._stubs['list_metadata_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs['list_metadata_jobs'] + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
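+        # Illustrative note (not generated code): per the docstring above, a
+        # hypothetical caller can cancel an in-progress import job and then
+        # submit a compensating metadata job to revert partial changes:
+        #
+        #     transport.cancel_metadata_job(
+        #         catalog.CancelMetadataJobRequest(
+        #             name="projects/my-project/locations/us-central1/metadataJobs/my-job",
+        #         ),
+        #     )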
+ if 'cancel_metadata_job' not in self._stubs: + self._stubs['cancel_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_metadata_job'] + + @property + def create_entry_link(self) -> Callable[ + [catalog.CreateEntryLinkRequest], + catalog.EntryLink]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_link' not in self._stubs: + self._stubs['create_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryLink', + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['create_entry_link'] + + @property + def delete_entry_link(self) -> Callable[ + [catalog.DeleteEntryLinkRequest], + catalog.EntryLink]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_link' not in self._stubs: + self._stubs['delete_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink', + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['delete_entry_link'] + + @property + def get_entry_link(self) -> Callable[ + [catalog.GetEntryLinkRequest], + catalog.EntryLink]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_link' not in self._stubs: + self._stubs['get_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryLink', + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['get_entry_link'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
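+        # Illustrative note (not generated code): this and the following
+        # properties expose the google.longrunning Operations mixin RPCs.
+        # A hypothetical caller polling an LRO by hand might do:
+        #
+        #     op = transport.get_operation(
+        #         operations_pb2.GetOperationRequest(name=operation.name),
+        #     )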
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'CatalogServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..1a45c31c78a0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py
@@ -0,0 +1,1521 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import catalog
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import CatalogServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.CatalogService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata into a dict of strings.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.CatalogService",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
+    """gRPC AsyncIO backend transport for CatalogService.
+
+    The primary resources offered by this service are
+    EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks.
+    They collectively let data administrators organize, manage,
+    secure, and catalog data located across cloud projects in their
+    organization in a variety of storage systems, including Cloud
+    Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
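+        # Illustrative note (not generated code): the cached channel already
+        # carries _LoggingClientAIOInterceptor, so when client_logging is
+        # available, enabling DEBUG logging on this module's logger surfaces
+        # request/response payloads, e.g. (hypothetical caller code):
+        #
+        #     import logging
+        #     logging.getLogger(
+        #         "google.cloud.dataplex_v1.services.catalog_service.transports.grpc_asyncio"
+        #     ).setLevel(logging.DEBUG)
+        #
+        # The logger name above assumes this module's import path; adjust it
+        # if the package is vendored elsewhere.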
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create entry type method over gRPC. + + Creates an EntryType. + + Returns: + Callable[[~.CreateEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_type' not in self._stubs: + self._stubs['create_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', + request_serializer=catalog.CreateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_type'] + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update entry type method over gRPC. + + Updates an EntryType. + + Returns: + Callable[[~.UpdateEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_type' not in self._stubs: + self._stubs['update_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', + request_serializer=catalog.UpdateEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_type'] + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete entry type method over gRPC. + + Deletes an EntryType. + + Returns: + Callable[[~.DeleteEntryTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_type' not in self._stubs: + self._stubs['delete_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', + request_serializer=catalog.DeleteEntryTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_type'] + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + Awaitable[catalog.ListEntryTypesResponse]]: + r"""Return a callable for the list entry types method over gRPC. 
+ + Lists EntryType resources in a project and location. + + Returns: + Callable[[~.ListEntryTypesRequest], + Awaitable[~.ListEntryTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_types' not in self._stubs: + self._stubs['list_entry_types'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', + request_serializer=catalog.ListEntryTypesRequest.serialize, + response_deserializer=catalog.ListEntryTypesResponse.deserialize, + ) + return self._stubs['list_entry_types'] + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + Awaitable[catalog.EntryType]]: + r"""Return a callable for the get entry type method over gRPC. + + Gets an EntryType. + + Returns: + Callable[[~.GetEntryTypeRequest], + Awaitable[~.EntryType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_type' not in self._stubs: + self._stubs['get_entry_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryType', + request_serializer=catalog.GetEntryTypeRequest.serialize, + response_deserializer=catalog.EntryType.deserialize, + ) + return self._stubs['get_entry_type'] + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create aspect type method over gRPC. + + Creates an AspectType. + + Returns: + Callable[[~.CreateAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_aspect_type' not in self._stubs: + self._stubs['create_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', + request_serializer=catalog.CreateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_aspect_type'] + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update aspect type method over gRPC. + + Updates an AspectType. + + Returns: + Callable[[~.UpdateAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
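+        # Illustrative note (not generated code): unlike the sync transport,
+        # the callable returned here yields an awaitable, so a hypothetical
+        # caller would write:
+        #
+        #     operation = await transport.update_aspect_type(
+        #         catalog.UpdateAspectTypeRequest(),
+        #     )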
+ if 'update_aspect_type' not in self._stubs: + self._stubs['update_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', + request_serializer=catalog.UpdateAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_aspect_type'] + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete aspect type method over gRPC. + + Deletes an AspectType. + + Returns: + Callable[[~.DeleteAspectTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_aspect_type' not in self._stubs: + self._stubs['delete_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', + request_serializer=catalog.DeleteAspectTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_aspect_type'] + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + Awaitable[catalog.ListAspectTypesResponse]]: + r"""Return a callable for the list aspect types method over gRPC. + + Lists AspectType resources in a project and location. + + Returns: + Callable[[~.ListAspectTypesRequest], + Awaitable[~.ListAspectTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_aspect_types' not in self._stubs: + self._stubs['list_aspect_types'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', + request_serializer=catalog.ListAspectTypesRequest.serialize, + response_deserializer=catalog.ListAspectTypesResponse.deserialize, + ) + return self._stubs['list_aspect_types'] + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + Awaitable[catalog.AspectType]]: + r"""Return a callable for the get aspect type method over gRPC. + + Gets an AspectType. + + Returns: + Callable[[~.GetAspectTypeRequest], + Awaitable[~.AspectType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_aspect_type' not in self._stubs: + self._stubs['get_aspect_type'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetAspectType', + request_serializer=catalog.GetAspectTypeRequest.serialize, + response_deserializer=catalog.AspectType.deserialize, + ) + return self._stubs['get_aspect_type'] + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an EntryGroup. 
+ + Returns: + Callable[[~.CreateEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', + request_serializer=catalog.CreateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', + request_serializer=catalog.UpdateEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. + + Returns: + Callable[[~.DeleteEntryGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', + request_serializer=catalog.DeleteEntryGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + Awaitable[catalog.ListEntryGroupsResponse]]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists EntryGroup resources in a project and location. + + Returns: + Callable[[~.ListEntryGroupsRequest], + Awaitable[~.ListEntryGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', + request_serializer=catalog.ListEntryGroupsRequest.serialize, + response_deserializer=catalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + Awaitable[catalog.EntryGroup]]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. + + Returns: + Callable[[~.GetEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', + request_serializer=catalog.GetEntryGroupRequest.serialize, + response_deserializer=catalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the create entry method over gRPC. + + Creates an Entry. + + Returns: + Callable[[~.CreateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntry', + request_serializer=catalog.CreateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the update entry method over gRPC. + + Updates an Entry. + + Returns: + Callable[[~.UpdateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', + request_serializer=catalog.UpdateEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an Entry. + + Returns: + Callable[[~.DeleteEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
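+        # Illustrative note (not generated code): DeleteEntry is not a
+        # long-running operation; awaiting the stub returns the deleted
+        # ``catalog.Entry`` directly, e.g. (hypothetical caller code):
+        #
+        #     entry = await transport.delete_entry(
+        #         catalog.DeleteEntryRequest(name=entry_name),
+        #     )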
+ if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', + request_serializer=catalog.DeleteEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['delete_entry'] + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + Awaitable[catalog.ListEntriesResponse]]: + r"""Return a callable for the list entries method over gRPC. + + Lists Entries within an EntryGroup. + + Returns: + Callable[[~.ListEntriesRequest], + Awaitable[~.ListEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListEntries', + request_serializer=catalog.ListEntriesRequest.serialize, + response_deserializer=catalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the get entry method over gRPC. + + Gets an Entry. + + Returns: + Callable[[~.GetEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntry', + request_serializer=catalog.GetEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + Awaitable[catalog.Entry]]: + r"""Return a callable for the lookup entry method over gRPC. + + Looks up an entry by name using the permission on the + source system. + + Returns: + Callable[[~.LookupEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/LookupEntry', + request_serializer=catalog.LookupEntryRequest.serialize, + response_deserializer=catalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + Awaitable[catalog.SearchEntriesResponse]]: + r"""Return a callable for the search entries method over gRPC. + + Searches for Entries matching the given query and + scope. + + Returns: + Callable[[~.SearchEntriesRequest], + Awaitable[~.SearchEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_entries' not in self._stubs: + self._stubs['search_entries'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/SearchEntries', + request_serializer=catalog.SearchEntriesRequest.serialize, + response_deserializer=catalog.SearchEntriesResponse.deserialize, + ) + return self._stubs['search_entries'] + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import metadata from a third-party system into + Dataplex Universal Catalog. + + Returns: + Callable[[~.CreateMetadataJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_metadata_job' not in self._stubs: + self._stubs['create_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_metadata_job'] + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + Awaitable[catalog.MetadataJob]]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + Awaitable[~.MetadataJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_metadata_job' not in self._stubs: + self._stubs['get_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs['get_metadata_job'] + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + Awaitable[catalog.ListMetadataJobsResponse]]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + Awaitable[~.ListMetadataJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
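+ # ListMetadataJobs follows the standard List pagination pattern: pass the + # response's next_page_token back as the next request's page_token to + # fetch subsequent pages.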
+ if 'list_metadata_jobs' not in self._stubs: + self._stubs['list_metadata_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs['list_metadata_jobs'] + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_metadata_job' not in self._stubs: + self._stubs['cancel_metadata_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_metadata_job'] + + @property + def create_entry_link(self) -> Callable[ + [catalog.CreateEntryLinkRequest], + Awaitable[catalog.EntryLink]]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_link' not in self._stubs: + self._stubs['create_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/CreateEntryLink', + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['create_entry_link'] + + @property + def delete_entry_link(self) -> Callable[ + [catalog.DeleteEntryLinkRequest], + Awaitable[catalog.EntryLink]]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
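+ # Note that DeleteEntryLink returns the deleted EntryLink itself rather + # than a long-running Operation, as the response_deserializer below shows.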
+ if 'delete_entry_link' not in self._stubs: + self._stubs['delete_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink', + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['delete_entry_link'] + + @property + def get_entry_link(self) -> Callable[ + [catalog.GetEntryLinkRequest], + Awaitable[catalog.EntryLink]]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_link' not in self._stubs: + self._stubs['get_entry_link'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CatalogService/GetEntryLink', + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs['get_entry_link'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entry_type: self._wrap_method( + self.create_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_type: self._wrap_method( + self.update_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_type: self._wrap_method( + self.delete_entry_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_types: self._wrap_method( + self.list_entry_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_type: self._wrap_method( + self.get_entry_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_aspect_type: self._wrap_method( + self.create_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_aspect_type: self._wrap_method( + self.update_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_aspect_type: self._wrap_method( + self.delete_aspect_type, + default_timeout=60.0, + client_info=client_info, + ), + self.list_aspect_types: self._wrap_method( + self.list_aspect_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aspect_type: self._wrap_method( + self.get_aspect_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
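+ # The AsyncRetry above retries ResourceExhausted and ServiceUnavailable + # errors with exponential backoff: delays start at 1.0s and grow by 1.3x + # up to a 10.0s cap, within an overall 60s deadline.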
client_info=client_info, + ), + self.create_entry_group: self._wrap_method( + self.create_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry_group: self._wrap_method( + self.update_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry_group: self._wrap_method( + self.delete_entry_group, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entry_groups: self._wrap_method( + self.list_entry_groups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_entry_group: self._wrap_method( + self.get_entry_group, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_entry: self._wrap_method( + self.create_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entry: self._wrap_method( + self.update_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entry: self._wrap_method( + self.delete_entry, + default_timeout=60.0, + client_info=client_info, + ), + self.list_entries: self._wrap_method( + self.list_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_entry: self._wrap_method( + self.get_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lookup_entry: self._wrap_method( + self.lookup_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.search_entries: self._wrap_method( + self.search_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_metadata_job: self._wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: self._wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: self._wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: self._wrap_method( + self.cancel_metadata_job, + default_timeout=None, + 
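+ # default_timeout=None applies no client-side deadline by default; a + # timeout can still be supplied per call.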
client_info=client_info, + ), + self.create_entry_link: self._wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: self._wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: self._wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
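+ # These Operations and Locations stubs back the google.longrunning and + # google.cloud.location mixins; among other things, they poll and manage + # the Operation objects returned by long-running RPCs such as + # CreateMetadataJob.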
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'CatalogServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py new file mode 100644 index 000000000000..2cf3dbe13573 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py @@ -0,0 +1,6010 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import catalog +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseCatalogServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class CatalogServiceRestInterceptor: + """Interceptor for CatalogService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CatalogServiceRestTransport. + + .. 
code-block:: python + class MyCustomCatalogServiceInterceptor(CatalogServiceRestInterceptor): + def pre_cancel_metadata_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_aspect_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_aspect_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entry_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entry_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_metadata_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_metadata_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_aspect_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_aspect_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_entry_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_entry_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_aspect_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_aspect_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entry_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry_group(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_get_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entry_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_metadata_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_metadata_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_aspect_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_aspect_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entry_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entry_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entry_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entry_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_metadata_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_metadata_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lookup_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_aspect_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_aspect_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entry_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entry_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entry_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entry_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CatalogServiceRestTransport(interceptor=MyCustomCatalogServiceInterceptor()) + client = CatalogServiceClient(transport=transport) + + + """ + 
def pre_cancel_metadata_job(self, request: catalog.CancelMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CancelMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_metadata_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def pre_create_aspect_type(self, request: catalog.CreateAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_aspect_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_aspect_type + + DEPRECATED. Please use the `post_create_aspect_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_aspect_type` interceptor runs + before the `post_create_aspect_type_with_metadata` interceptor. + """ + return response + + def post_create_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_aspect_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_aspect_type_with_metadata` + interceptor in new development instead of the `post_create_aspect_type` interceptor. + When both interceptors are used, this `post_create_aspect_type_with_metadata` interceptor runs after the + `post_create_aspect_type` interceptor. The (possibly modified) response returned by + `post_create_aspect_type` will be passed to + `post_create_aspect_type_with_metadata`. + """ + return response, metadata + + def pre_create_entry(self, request: catalog.CreateEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_entry(self, response: catalog.Entry) -> catalog.Entry: + """Post-rpc interceptor for create_entry + + DEPRECATED. Please use the `post_create_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry` interceptor runs + before the `post_create_entry_with_metadata` interceptor. + """ + return response + + def post_create_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. 
+ + We recommend only using this `post_create_entry_with_metadata` + interceptor in new development instead of the `post_create_entry` interceptor. + When both interceptors are used, this `post_create_entry_with_metadata` interceptor runs after the + `post_create_entry` interceptor. The (possibly modified) response returned by + `post_create_entry` will be passed to + `post_create_entry_with_metadata`. + """ + return response, metadata + + def pre_create_entry_group(self, request: catalog.CreateEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_entry_group + + DEPRECATED. Please use the `post_create_entry_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry_group` interceptor runs + before the `post_create_entry_group_with_metadata` interceptor. + """ + return response + + def post_create_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_entry_group_with_metadata` + interceptor in new development instead of the `post_create_entry_group` interceptor. + When both interceptors are used, this `post_create_entry_group_with_metadata` interceptor runs after the + `post_create_entry_group` interceptor. The (possibly modified) response returned by + `post_create_entry_group` will be passed to + `post_create_entry_group_with_metadata`. + """ + return response, metadata + + def pre_create_entry_link(self, request: catalog.CreateEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for create_entry_link + + DEPRECATED. Please use the `post_create_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry_link` interceptor runs + before the `post_create_entry_link_with_metadata` interceptor. 
+ """ + return response + + def post_create_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_entry_link_with_metadata` + interceptor in new development instead of the `post_create_entry_link` interceptor. + When both interceptors are used, this `post_create_entry_link_with_metadata` interceptor runs after the + `post_create_entry_link` interceptor. The (possibly modified) response returned by + `post_create_entry_link` will be passed to + `post_create_entry_link_with_metadata`. + """ + return response, metadata + + def pre_create_entry_type(self, request: catalog.CreateEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_entry_type + + DEPRECATED. Please use the `post_create_entry_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry_type` interceptor runs + before the `post_create_entry_type_with_metadata` interceptor. + """ + return response + + def post_create_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_entry_type_with_metadata` + interceptor in new development instead of the `post_create_entry_type` interceptor. + When both interceptors are used, this `post_create_entry_type_with_metadata` interceptor runs after the + `post_create_entry_type` interceptor. The (possibly modified) response returned by + `post_create_entry_type` will be passed to + `post_create_entry_type_with_metadata`. + """ + return response, metadata + + def pre_create_metadata_job(self, request: catalog.CreateMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_metadata_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_metadata_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_metadata_job + + DEPRECATED. Please use the `post_create_metadata_job_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_metadata_job` interceptor runs + before the `post_create_metadata_job_with_metadata` interceptor. + """ + return response + + def post_create_metadata_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_metadata_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_metadata_job_with_metadata` + interceptor in new development instead of the `post_create_metadata_job` interceptor. + When both interceptors are used, this `post_create_metadata_job_with_metadata` interceptor runs after the + `post_create_metadata_job` interceptor. The (possibly modified) response returned by + `post_create_metadata_job` will be passed to + `post_create_metadata_job_with_metadata`. + """ + return response, metadata + + def pre_delete_aspect_type(self, request: catalog.DeleteAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_aspect_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_aspect_type + + DEPRECATED. Please use the `post_delete_aspect_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_aspect_type` interceptor runs + before the `post_delete_aspect_type_with_metadata` interceptor. + """ + return response + + def post_delete_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_aspect_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_aspect_type_with_metadata` + interceptor in new development instead of the `post_delete_aspect_type` interceptor. + When both interceptors are used, this `post_delete_aspect_type_with_metadata` interceptor runs after the + `post_delete_aspect_type` interceptor. The (possibly modified) response returned by + `post_delete_aspect_type` will be passed to + `post_delete_aspect_type_with_metadata`. + """ + return response, metadata + + def pre_delete_entry(self, request: catalog.DeleteEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. 
+ """ + return request, metadata + + def post_delete_entry(self, response: catalog.Entry) -> catalog.Entry: + """Post-rpc interceptor for delete_entry + + DEPRECATED. Please use the `post_delete_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry` interceptor runs + before the `post_delete_entry_with_metadata` interceptor. + """ + return response + + def post_delete_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_with_metadata` + interceptor in new development instead of the `post_delete_entry` interceptor. + When both interceptors are used, this `post_delete_entry_with_metadata` interceptor runs after the + `post_delete_entry` interceptor. The (possibly modified) response returned by + `post_delete_entry` will be passed to + `post_delete_entry_with_metadata`. + """ + return response, metadata + + def pre_delete_entry_group(self, request: catalog.DeleteEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_entry_group + + DEPRECATED. Please use the `post_delete_entry_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry_group` interceptor runs + before the `post_delete_entry_group_with_metadata` interceptor. + """ + return response + + def post_delete_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_group_with_metadata` + interceptor in new development instead of the `post_delete_entry_group` interceptor. + When both interceptors are used, this `post_delete_entry_group_with_metadata` interceptor runs after the + `post_delete_entry_group` interceptor. The (possibly modified) response returned by + `post_delete_entry_group` will be passed to + `post_delete_entry_group_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_entry_link(self, request: catalog.DeleteEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for delete_entry_link + + DEPRECATED. Please use the `post_delete_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry_link` interceptor runs + before the `post_delete_entry_link_with_metadata` interceptor. + """ + return response + + def post_delete_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_link_with_metadata` + interceptor in new development instead of the `post_delete_entry_link` interceptor. + When both interceptors are used, this `post_delete_entry_link_with_metadata` interceptor runs after the + `post_delete_entry_link` interceptor. The (possibly modified) response returned by + `post_delete_entry_link` will be passed to + `post_delete_entry_link_with_metadata`. + """ + return response, metadata + + def pre_delete_entry_type(self, request: catalog.DeleteEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_entry_type + + DEPRECATED. Please use the `post_delete_entry_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry_type` interceptor runs + before the `post_delete_entry_type_with_metadata` interceptor. + """ + return response + + def post_delete_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_type_with_metadata` + interceptor in new development instead of the `post_delete_entry_type` interceptor. + When both interceptors are used, this `post_delete_entry_type_with_metadata` interceptor runs after the + `post_delete_entry_type` interceptor. 
The (possibly modified) response returned by + `post_delete_entry_type` will be passed to + `post_delete_entry_type_with_metadata`. + """ + return response, metadata + + def pre_get_aspect_type(self, request: catalog.GetAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_aspect_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_aspect_type(self, response: catalog.AspectType) -> catalog.AspectType: + """Post-rpc interceptor for get_aspect_type + + DEPRECATED. Please use the `post_get_aspect_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_aspect_type` interceptor runs + before the `post_get_aspect_type_with_metadata` interceptor. + """ + return response + + def post_get_aspect_type_with_metadata(self, response: catalog.AspectType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.AspectType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aspect_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_aspect_type_with_metadata` + interceptor in new development instead of the `post_get_aspect_type` interceptor. + When both interceptors are used, this `post_get_aspect_type_with_metadata` interceptor runs after the + `post_get_aspect_type` interceptor. The (possibly modified) response returned by + `post_get_aspect_type` will be passed to + `post_get_aspect_type_with_metadata`. + """ + return response, metadata + + def pre_get_entry(self, request: catalog.GetEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry(self, response: catalog.Entry) -> catalog.Entry: + """Post-rpc interceptor for get_entry + + DEPRECATED. Please use the `post_get_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry` interceptor runs + before the `post_get_entry_with_metadata` interceptor. + """ + return response + + def post_get_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_with_metadata` + interceptor in new development instead of the `post_get_entry` interceptor. + When both interceptors are used, this `post_get_entry_with_metadata` interceptor runs after the + `post_get_entry` interceptor. 
The (possibly modified) response returned by + `post_get_entry` will be passed to + `post_get_entry_with_metadata`. + """ + return response, metadata + + def pre_get_entry_group(self, request: catalog.GetEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry_group(self, response: catalog.EntryGroup) -> catalog.EntryGroup: + """Post-rpc interceptor for get_entry_group + + DEPRECATED. Please use the `post_get_entry_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry_group` interceptor runs + before the `post_get_entry_group_with_metadata` interceptor. + """ + return response + + def post_get_entry_group_with_metadata(self, response: catalog.EntryGroup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_group_with_metadata` + interceptor in new development instead of the `post_get_entry_group` interceptor. + When both interceptors are used, this `post_get_entry_group_with_metadata` interceptor runs after the + `post_get_entry_group` interceptor. The (possibly modified) response returned by + `post_get_entry_group` will be passed to + `post_get_entry_group_with_metadata`. + """ + return response, metadata + + def pre_get_entry_link(self, request: catalog.GetEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for get_entry_link + + DEPRECATED. Please use the `post_get_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry_link` interceptor runs + before the `post_get_entry_link_with_metadata` interceptor. + """ + return response + + def post_get_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_link_with_metadata` + interceptor in new development instead of the `post_get_entry_link` interceptor. + When both interceptors are used, this `post_get_entry_link_with_metadata` interceptor runs after the + `post_get_entry_link` interceptor. 
The (possibly modified) response returned by + `post_get_entry_link` will be passed to + `post_get_entry_link_with_metadata`. + """ + return response, metadata + + def pre_get_entry_type(self, request: catalog.GetEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry_type(self, response: catalog.EntryType) -> catalog.EntryType: + """Post-rpc interceptor for get_entry_type + + DEPRECATED. Please use the `post_get_entry_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry_type` interceptor runs + before the `post_get_entry_type_with_metadata` interceptor. + """ + return response + + def post_get_entry_type_with_metadata(self, response: catalog.EntryType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_type_with_metadata` + interceptor in new development instead of the `post_get_entry_type` interceptor. + When both interceptors are used, this `post_get_entry_type_with_metadata` interceptor runs after the + `post_get_entry_type` interceptor. The (possibly modified) response returned by + `post_get_entry_type` will be passed to + `post_get_entry_type_with_metadata`. + """ + return response, metadata + + def pre_get_metadata_job(self, request: catalog.GetMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_metadata_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_metadata_job(self, response: catalog.MetadataJob) -> catalog.MetadataJob: + """Post-rpc interceptor for get_metadata_job + + DEPRECATED. Please use the `post_get_metadata_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_metadata_job` interceptor runs + before the `post_get_metadata_job_with_metadata` interceptor. + """ + return response + + def post_get_metadata_job_with_metadata(self, response: catalog.MetadataJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.MetadataJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_metadata_job_with_metadata` + interceptor in new development instead of the `post_get_metadata_job` interceptor. 
+ When both interceptors are used, this `post_get_metadata_job_with_metadata` interceptor runs after the + `post_get_metadata_job` interceptor. The (possibly modified) response returned by + `post_get_metadata_job` will be passed to + `post_get_metadata_job_with_metadata`. + """ + return response, metadata + + def pre_list_aspect_types(self, request: catalog.ListAspectTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListAspectTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_aspect_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_aspect_types(self, response: catalog.ListAspectTypesResponse) -> catalog.ListAspectTypesResponse: + """Post-rpc interceptor for list_aspect_types + + DEPRECATED. Please use the `post_list_aspect_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_list_aspect_types` interceptor runs + before the `post_list_aspect_types_with_metadata` interceptor. + """ + return response + + def post_list_aspect_types_with_metadata(self, response: catalog.ListAspectTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListAspectTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_aspect_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_list_aspect_types_with_metadata` + interceptor in new development instead of the `post_list_aspect_types` interceptor. + When both interceptors are used, this `post_list_aspect_types_with_metadata` interceptor runs after the + `post_list_aspect_types` interceptor. The (possibly modified) response returned by + `post_list_aspect_types` will be passed to + `post_list_aspect_types_with_metadata`. + """ + return response, metadata + + def pre_list_entries(self, request: catalog.ListEntriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_entries(self, response: catalog.ListEntriesResponse) -> catalog.ListEntriesResponse: + """Post-rpc interceptor for list_entries + + DEPRECATED. Please use the `post_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_list_entries` interceptor runs + before the `post_list_entries_with_metadata` interceptor. + """ + return response + + def post_list_entries_with_metadata(self, response: catalog.ListEntriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. 
+ + We recommend only using this `post_list_entries_with_metadata` + interceptor in new development instead of the `post_list_entries` interceptor. + When both interceptors are used, this `post_list_entries_with_metadata` interceptor runs after the + `post_list_entries` interceptor. The (possibly modified) response returned by + `post_list_entries` will be passed to + `post_list_entries_with_metadata`. + """ + return response, metadata + + def pre_list_entry_groups(self, request: catalog.ListEntryGroupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryGroupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_entry_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_entry_groups(self, response: catalog.ListEntryGroupsResponse) -> catalog.ListEntryGroupsResponse: + """Post-rpc interceptor for list_entry_groups + + DEPRECATED. Please use the `post_list_entry_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_list_entry_groups` interceptor runs + before the `post_list_entry_groups_with_metadata` interceptor. + """ + return response + + def post_list_entry_groups_with_metadata(self, response: catalog.ListEntryGroupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_entry_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_list_entry_groups_with_metadata` + interceptor in new development instead of the `post_list_entry_groups` interceptor. + When both interceptors are used, this `post_list_entry_groups_with_metadata` interceptor runs after the + `post_list_entry_groups` interceptor. The (possibly modified) response returned by + `post_list_entry_groups` will be passed to + `post_list_entry_groups_with_metadata`. + """ + return response, metadata + + def pre_list_entry_types(self, request: catalog.ListEntryTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_entry_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_entry_types(self, response: catalog.ListEntryTypesResponse) -> catalog.ListEntryTypesResponse: + """Post-rpc interceptor for list_entry_types + + DEPRECATED. Please use the `post_list_entry_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_list_entry_types` interceptor runs + before the `post_list_entry_types_with_metadata` interceptor. 
+ """ + return response + + def post_list_entry_types_with_metadata(self, response: catalog.ListEntryTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_entry_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_list_entry_types_with_metadata` + interceptor in new development instead of the `post_list_entry_types` interceptor. + When both interceptors are used, this `post_list_entry_types_with_metadata` interceptor runs after the + `post_list_entry_types` interceptor. The (possibly modified) response returned by + `post_list_entry_types` will be passed to + `post_list_entry_types_with_metadata`. + """ + return response, metadata + + def pre_list_metadata_jobs(self, request: catalog.ListMetadataJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListMetadataJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_metadata_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_metadata_jobs(self, response: catalog.ListMetadataJobsResponse) -> catalog.ListMetadataJobsResponse: + """Post-rpc interceptor for list_metadata_jobs + + DEPRECATED. Please use the `post_list_metadata_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_list_metadata_jobs` interceptor runs + before the `post_list_metadata_jobs_with_metadata` interceptor. + """ + return response + + def post_list_metadata_jobs_with_metadata(self, response: catalog.ListMetadataJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListMetadataJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_metadata_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_list_metadata_jobs_with_metadata` + interceptor in new development instead of the `post_list_metadata_jobs` interceptor. + When both interceptors are used, this `post_list_metadata_jobs_with_metadata` interceptor runs after the + `post_list_metadata_jobs` interceptor. The (possibly modified) response returned by + `post_list_metadata_jobs` will be passed to + `post_list_metadata_jobs_with_metadata`. + """ + return response, metadata + + def pre_lookup_entry(self, request: catalog.LookupEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.LookupEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for lookup_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_lookup_entry(self, response: catalog.Entry) -> catalog.Entry: + """Post-rpc interceptor for lookup_entry + + DEPRECATED. Please use the `post_lookup_entry_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_lookup_entry` interceptor runs + before the `post_lookup_entry_with_metadata` interceptor. + """ + return response + + def post_lookup_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_lookup_entry_with_metadata` + interceptor in new development instead of the `post_lookup_entry` interceptor. + When both interceptors are used, this `post_lookup_entry_with_metadata` interceptor runs after the + `post_lookup_entry` interceptor. The (possibly modified) response returned by + `post_lookup_entry` will be passed to + `post_lookup_entry_with_metadata`. + """ + return response, metadata + + def pre_search_entries(self, request: catalog.SearchEntriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.SearchEntriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for search_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_search_entries(self, response: catalog.SearchEntriesResponse) -> catalog.SearchEntriesResponse: + """Post-rpc interceptor for search_entries + + DEPRECATED. Please use the `post_search_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_search_entries` interceptor runs + before the `post_search_entries_with_metadata` interceptor. + """ + return response + + def post_search_entries_with_metadata(self, response: catalog.SearchEntriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.SearchEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_search_entries_with_metadata` + interceptor in new development instead of the `post_search_entries` interceptor. + When both interceptors are used, this `post_search_entries_with_metadata` interceptor runs after the + `post_search_entries` interceptor. The (possibly modified) response returned by + `post_search_entries` will be passed to + `post_search_entries_with_metadata`. + """ + return response, metadata + + def pre_update_aspect_type(self, request: catalog.UpdateAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_aspect_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_update_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_aspect_type + + DEPRECATED. 
Please use the `post_update_aspect_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_update_aspect_type` interceptor runs + before the `post_update_aspect_type_with_metadata` interceptor. + """ + return response + + def post_update_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_aspect_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_update_aspect_type_with_metadata` + interceptor in new development instead of the `post_update_aspect_type` interceptor. + When both interceptors are used, this `post_update_aspect_type_with_metadata` interceptor runs after the + `post_update_aspect_type` interceptor. The (possibly modified) response returned by + `post_update_aspect_type` will be passed to + `post_update_aspect_type_with_metadata`. + """ + return response, metadata + + def pre_update_entry(self, request: catalog.UpdateEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_update_entry(self, response: catalog.Entry) -> catalog.Entry: + """Post-rpc interceptor for update_entry + + DEPRECATED. Please use the `post_update_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_update_entry` interceptor runs + before the `post_update_entry_with_metadata` interceptor. + """ + return response + + def post_update_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_update_entry_with_metadata` + interceptor in new development instead of the `post_update_entry` interceptor. + When both interceptors are used, this `post_update_entry_with_metadata` interceptor runs after the + `post_update_entry` interceptor. The (possibly modified) response returned by + `post_update_entry` will be passed to + `post_update_entry_with_metadata`. + """ + return response, metadata + + def pre_update_entry_group(self, request: catalog.UpdateEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_entry_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. 
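+
+        For example, a subclass could append a custom metadata header before
+        the request is sent (a sketch; the header name is illustrative)::
+
+            class MyInterceptor(CatalogServiceRestInterceptor):
+                def pre_update_entry_group(self, request, metadata):
+                    # Extend the outgoing metadata with one extra pair.
+                    return request, list(metadata) + [("x-custom-trace", "demo")]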
+ """ + return request, metadata + + def post_update_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_entry_group + + DEPRECATED. Please use the `post_update_entry_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_update_entry_group` interceptor runs + before the `post_update_entry_group_with_metadata` interceptor. + """ + return response + + def post_update_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entry_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_update_entry_group_with_metadata` + interceptor in new development instead of the `post_update_entry_group` interceptor. + When both interceptors are used, this `post_update_entry_group_with_metadata` interceptor runs after the + `post_update_entry_group` interceptor. The (possibly modified) response returned by + `post_update_entry_group` will be passed to + `post_update_entry_group_with_metadata`. + """ + return response, metadata + + def pre_update_entry_type(self, request: catalog.UpdateEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_entry_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_update_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_entry_type + + DEPRECATED. Please use the `post_update_entry_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_update_entry_type` interceptor runs + before the `post_update_entry_type_with_metadata` interceptor. + """ + return response + + def post_update_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entry_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_update_entry_type_with_metadata` + interceptor in new development instead of the `post_update_entry_type` interceptor. + When both interceptors are used, this `post_update_entry_type_with_metadata` interceptor runs after the + `post_update_entry_type` interceptor. The (possibly modified) response returned by + `post_update_entry_type` will be passed to + `post_update_entry_type_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CatalogServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CatalogServiceRestInterceptor + + +class CatalogServiceRestTransport(_BaseCatalogServiceRestTransport): + """REST backend synchronous transport for CatalogService. + + The primary resources offered by this service are + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud + Storage and BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CatalogServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CatalogServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. 
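+        # (Editor's sketch) Long-running operations can then be polled through
+        # this client, e.g.
+        #     op = transport.operations_client.get_operation(name=op_name)
+        # where `op_name` is the `name` of an Operation returned by one of the
+        # mutating RPCs below (`op_name` is an illustrative variable).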
+ return self._operations_client + + class _CancelMetadataJob(_BaseCatalogServiceRestTransport._BaseCancelMetadataJob, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CancelMetadataJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CancelMetadataJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the cancel metadata job method over HTTP. + + Args: + request (~.catalog.CancelMetadataJobRequest): + The request object. Cancel metadata job request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_http_options() + + request, metadata = self._interceptor.pre_cancel_metadata_job(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CancelMetadataJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CancelMetadataJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CancelMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
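+            # (Editor's note) `from_http_response` maps the HTTP status code to
+            # a typed exception, for example a 404 raises
+            # `core_exceptions.NotFound` and a 403 raises `core_exceptions.Forbidden`.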
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateAspectType(_BaseCatalogServiceRestTransport._BaseCreateAspectType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateAspectType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateAspectTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create aspect type method over HTTP. + + Args: + request (~.catalog.CreateAspectTypeRequest): + The request object. Create AspectType Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_http_options() + + request, metadata = self._interceptor.pre_create_aspect_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateAspectType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateAspectType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_aspect_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_aspect_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateAspectType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEntry(_BaseCatalogServiceRestTransport._BaseCreateEntry, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntry") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateEntryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.Entry: + r"""Call the create entry method over HTTP. + + Args: + request (~.catalog.CreateEntryRequest): + The request object. Create Entry request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. 
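+
+                A sketch of a typical call through the client surface
+                (resource names are illustrative)::
+
+                    entry = client.create_entry(
+                        parent="projects/p/locations/us-central1/entryGroups/g",
+                        entry_id="my-entry",
+                        entry=catalog.Entry(
+                            entry_type="projects/p/locations/us-central1/entryTypes/t",
+                        ),
+                    )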
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_http_options() + + request, metadata = self._interceptor.pre_create_entry(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntry", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.Entry() + pb_resp = catalog.Entry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.Entry.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntry", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEntryGroup(_BaseCatalogServiceRestTransport._BaseCreateEntryGroup, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntryGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateEntryGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
operations_pb2.Operation: + r"""Call the create entry group method over HTTP. + + Args: + request (~.catalog.CreateEntryGroupRequest): + The request object. Create EntryGroup Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_http_options() + + request, metadata = self._interceptor.pre_create_entry_group(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryGroup", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_group_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEntryLink(_BaseCatalogServiceRestTransport._BaseCreateEntryLink, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateEntryLinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.EntryLink: + r"""Call the create entry link method over HTTP. + + Args: + request (~.catalog.CreateEntryLinkRequest): + The request object. Request message for CreateEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
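+
+                A sketch of building the request (the field names are assumed
+                to mirror the other Create* messages in this change; they are
+                not confirmed here)::
+
+                    request = catalog.CreateEntryLinkRequest(
+                        parent="projects/p/locations/us-central1/entryGroups/g",
+                        entry_link_id="my-link",
+                        entry_link=my_entry_link,  # a catalog.EntryLink (hypothetical variable)
+                    )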
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_http_options() + + request, metadata = self._interceptor.pre_create_entry_link(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryLink", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_link_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEntryType(_BaseCatalogServiceRestTransport._BaseCreateEntryType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntryType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateEntryTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create entry type method over HTTP. + + Args: + request (~.catalog.CreateEntryTypeRequest): + The request object. Create EntryType Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_http_options() + + request, metadata = self._interceptor.pre_create_entry_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateMetadataJob(_BaseCatalogServiceRestTransport._BaseCreateMetadataJob, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateMetadataJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.CreateMetadataJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create metadata job method over HTTP. + + Args: + request (~.catalog.CreateMetadataJobRequest): + The request object. Create metadata job request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_http_options() + + request, metadata = self._interceptor.pre_create_metadata_job(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateMetadataJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateMetadataJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_metadata_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_metadata_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateMetadataJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAspectType(_BaseCatalogServiceRestTransport._BaseDeleteAspectType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteAspectType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.DeleteAspectTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete aspect type method over HTTP. + + Args: + request (~.catalog.DeleteAspectTypeRequest): + The request object. Delete AspectType Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_http_options() + + request, metadata = self._interceptor.pre_delete_aspect_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteAspectType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteAspectType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
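
Aside on the query-string handling used by every `_get_response` above: `rest_helpers.flatten_query_params` turns the (possibly nested) dict of query params into dotted `(name, value)` pairs. A short sketch; the exact canonicalization under `strict=True` (booleans lowered to `"true"`/`"false"`) is my reading of `google.api_core` and worth verifying against the installed version:

```python
from google.api_core import rest_helpers

# Nested keys flatten to dotted parameter names; strict=True canonicalizes
# leaf values to strings (assumption: booleans become "true"/"false").
params = rest_helpers.flatten_query_params(
    {"validateOnly": True, "updateMask": {"paths": "displayName"}},
    strict=True,
)
print(params)  # e.g. [('validateOnly', 'true'), ('updateMask.paths', 'displayName')]
```
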
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_aspect_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_aspect_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteAspectType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEntry(_BaseCatalogServiceRestTransport._BaseDeleteEntry, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntry") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.DeleteEntryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.Entry: + r"""Call the delete entry method over HTTP. + + Args: + request (~.catalog.DeleteEntryRequest): + The request object. Delete Entry request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_http_options() + + request, metadata = self._interceptor.pre_delete_entry(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntry", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.Entry() + pb_resp = catalog.Entry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.Entry.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntry", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEntryGroup(_BaseCatalogServiceRestTransport._BaseDeleteEntryGroup, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntryGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.DeleteEntryGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete entry group method over HTTP. 
+ + Args: + request (~.catalog.DeleteEntryGroupRequest): + The request object. Delete EntryGroup Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_http_options() + + request, metadata = self._interceptor.pre_delete_entry_group(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryGroup", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
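
Every handler brackets its work with `self._interceptor.pre_*` and `post_*` calls, so a custom interceptor is the intended seam for cross-cutting concerns such as auditing. A minimal sketch; the import path follows the generated package layout and should be treated as an assumption:

```python
from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.catalog_service.transports.rest import (
    CatalogServiceRestInterceptor,
    CatalogServiceRestTransport,
)

class AuditInterceptor(CatalogServiceRestInterceptor):
    def pre_delete_entry_group(self, request, metadata):
        # Runs before transcoding; may rewrite the request or extend metadata.
        print(f"deleting {request.name}")
        return request, metadata

# The transport falls back to application default credentials if none are given.
transport = CatalogServiceRestTransport(interceptor=AuditInterceptor())
client = dataplex_v1.CatalogServiceClient(transport=transport)
```
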
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_group_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEntryLink(_BaseCatalogServiceRestTransport._BaseDeleteEntryLink, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.DeleteEntryLinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.EntryLink: + r"""Call the delete entry link method over HTTP. + + Args: + request (~.catalog.DeleteEntryLinkRequest): + The request object. Request message for DeleteEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_http_options() + + request, metadata = self._interceptor.pre_delete_entry_link(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryLink", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_link_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEntryType(_BaseCatalogServiceRestTransport._BaseDeleteEntryType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntryType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.DeleteEntryTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete entry type method over HTTP. 
+ + Args: + request (~.catalog.DeleteEntryTypeRequest): + The request object. Delete EntryType Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_http_options() + + request, metadata = self._interceptor.pre_delete_entry_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
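
Handlers that return `operations_pb2.Operation`, like this one, surface at the public client as long-running operations wrapped in `google.api_core.operation.Operation`, so callers poll with `result()` rather than handling the raw proto. Sketch of the caller-side view (resource name is hypothetical):

```python
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

op = client.delete_entry_type(
    name="projects/my-project/locations/us-central1/entryTypes/my-type"  # hypothetical
)
# result() polls the LRO to completion; a successful delete resolves to Empty.
op.result(timeout=300)
```
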
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAspectType(_BaseCatalogServiceRestTransport._BaseGetAspectType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetAspectType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetAspectTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.AspectType: + r"""Call the get aspect type method over HTTP. + + Args: + request (~.catalog.GetAspectTypeRequest): + The request object. Get AspectType request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.AspectType: + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_http_options() + + request, metadata = self._interceptor.pre_get_aspect_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetAspectType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetAspectType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.AspectType() + pb_resp = catalog.AspectType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_aspect_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aspect_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.AspectType.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetAspectType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEntry(_BaseCatalogServiceRestTransport._BaseGetEntry, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntry") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetEntryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.Entry: + r"""Call the get entry method over HTTP. 
+ + Args: + request (~.catalog.GetEntryRequest): + The request object. Get Entry request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetEntry._get_http_options() + + request, metadata = self._interceptor.pre_get_entry(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntry._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntry._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntry", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
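
The `CLIENT_LOGGING_SUPPORTED` blocks above only run when DEBUG logging is enabled for this module's logger. One way to switch them on, assuming the scoped-logging environment variable that recent `google-api-core` releases read at import time:

```python
import logging
import os

# Assumption: GOOGLE_SDK_PYTHON_LOGGING_SCOPE must be set before the client
# library is imported; api-core then attaches a handler for that namespace.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.dataplex_v1"

from google.cloud import dataplex_v1  # noqa: E402  (import after the env var)

# Plain stdlib configuration reaches the same logger as well:
logging.basicConfig(level=logging.DEBUG)
```
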
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.Entry() + pb_resp = catalog.Entry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.Entry.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntry", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEntryGroup(_BaseCatalogServiceRestTransport._BaseGetEntryGroup, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntryGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetEntryGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.EntryGroup: + r"""Call the get entry group method over HTTP. + + Args: + request (~.catalog.GetEntryGroupRequest): + The request object. Get EntryGroup request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryGroup: + An Entry Group represents a logical + grouping of one or more Entries. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_http_options() + + request, metadata = self._interceptor.pre_get_entry_group(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryGroup", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryGroup() + pb_resp = catalog.EntryGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_group_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.EntryGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEntryLink(_BaseCatalogServiceRestTransport._BaseGetEntryLink, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetEntryLinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.EntryLink: + r"""Call the get entry link method over HTTP. 
+ + Args: + request (~.catalog.GetEntryLinkRequest): + The request object. Request message for GetEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_http_options() + + request, metadata = self._interceptor.pre_get_entry_link(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryLink", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
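
For completeness, the caller-side view of the new `GetEntryLink` RPC; the resource-name pattern is inferred from the request docs and is illustrative only:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

link = client.get_entry_link(
    # Hypothetical name; EntryLinks hang off an EntryGroup per this change.
    name="projects/my-project/locations/us-central1/entryGroups/my-group/entryLinks/my-link"
)
print(link.name)
```
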
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_link_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEntryType(_BaseCatalogServiceRestTransport._BaseGetEntryType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntryType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetEntryTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.EntryType: + r"""Call the get entry type method over HTTP. + + Args: + request (~.catalog.GetEntryTypeRequest): + The request object. Get EntryType request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryType: + Entry Type is a template for creating + Entries. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_http_options() + + request, metadata = self._interceptor.pre_get_entry_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryType() + pb_resp = catalog.EntryType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.EntryType.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMetadataJob(_BaseCatalogServiceRestTransport._BaseGetMetadataJob, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetMetadataJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.GetMetadataJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.MetadataJob: + r"""Call the get metadata job method over HTTP. 
+ + Args: + request (~.catalog.GetMetadataJobRequest): + The request object. Get metadata job request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.MetadataJob: + A metadata job resource. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_http_options() + + request, metadata = self._interceptor.pre_get_metadata_job(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetMetadataJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetMetadataJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
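
The `metadata` argument documented above takes str values except for keys ending in `-bin`, which must carry bytes. A small sketch (the custom key name and resource path are made up):

```python
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

metadata = [
    # Ordinary keys take str values...
    ("x-goog-request-params", "name=projects/my-project/locations/us-central1/metadataJobs/my-job"),
    # ...while "-bin"-suffixed keys carry raw bytes.
    ("x-debug-context-bin", b"\x0a\x03abc"),  # hypothetical key
]

job = client.get_metadata_job(
    name="projects/my-project/locations/us-central1/metadataJobs/my-job",  # hypothetical
    metadata=metadata,
)
print(job.name)
```
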
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.MetadataJob() + pb_resp = catalog.MetadataJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_metadata_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.MetadataJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetMetadataJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAspectTypes(_BaseCatalogServiceRestTransport._BaseListAspectTypes, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListAspectTypes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.ListAspectTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.ListAspectTypesResponse: + r"""Call the list aspect types method over HTTP. + + Args: + request (~.catalog.ListAspectTypesRequest): + The request object. List AspectTypes request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.ListAspectTypesResponse: + List AspectTypes response. 
+ """ + + http_options = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_http_options() + + request, metadata = self._interceptor.pre_list_aspect_types(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListAspectTypes", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListAspectTypes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListAspectTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.ListAspectTypesResponse() + pb_resp = catalog.ListAspectTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_aspect_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_aspect_types_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.ListAspectTypesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListAspectTypes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEntries(_BaseCatalogServiceRestTransport._BaseListEntries, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListEntries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.ListEntriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.ListEntriesResponse: + r"""Call the list entries 
method over HTTP. + + Args: + request (~.catalog.ListEntriesRequest): + The request object. List Entries request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.ListEntriesResponse: + List Entries response. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseListEntries._get_http_options() + + request, metadata = self._interceptor.pre_list_entries(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntries._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListEntries._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntries", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListEntries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
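
`ListEntries` responses carry a `next_page_token`, and the generated client wraps this method in a pager, so plain iteration transparently repeats the HTTP call shown here for each page. Sketch (parent path hypothetical):

```python
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

pager = client.list_entries(
    parent="projects/my-project/locations/us-central1/entryGroups/my-group"  # hypothetical
)
for entry in pager:  # follow-up pages are fetched lazily
    print(entry.name)
```
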
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.ListEntriesResponse() + pb_resp = catalog.ListEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entries_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.ListEntriesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entries", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEntryGroups(_BaseCatalogServiceRestTransport._BaseListEntryGroups, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListEntryGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.ListEntryGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.ListEntryGroupsResponse: + r"""Call the list entry groups method over HTTP. + + Args: + request (~.catalog.ListEntryGroupsRequest): + The request object. List entryGroups request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.ListEntryGroupsResponse: + List entry groups response. 
+ """ + + http_options = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_http_options() + + request, metadata = self._interceptor.pre_list_entry_groups(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntryGroups", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntryGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListEntryGroups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.ListEntryGroupsResponse() + pb_resp = catalog.ListEntryGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_entry_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entry_groups_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.ListEntryGroupsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntryGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEntryTypes(_BaseCatalogServiceRestTransport._BaseListEntryTypes, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListEntryTypes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.ListEntryTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.ListEntryTypesResponse: + r"""Call 
the list entry types method over HTTP. + + Args: + request (~.catalog.ListEntryTypesRequest): + The request object. List EntryTypes request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.ListEntryTypesResponse: + List EntryTypes response. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_http_options() + + request, metadata = self._interceptor.pre_list_entry_types(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntryTypes", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntryTypes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListEntryTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
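Once the status check below passes, the transport deserializes the body in place: it allocates an empty proto-plus message, borrows the underlying protobuf via the `.pb()` classmethod, and lets `json_format.Parse` fill it; `ignore_unknown_fields=True` keeps older clients tolerant of fields the server added later. A standalone sketch of that round trip, using a hand-written JSON payload (the resource name in it is illustrative):

    from google.cloud.dataplex_v1.types import catalog
    from google.protobuf import json_format

    payload = '{"entryTypes": [{"name": "projects/p/locations/l/entryTypes/t"}]}'

    resp = catalog.ListEntryTypesResponse()           # empty proto-plus wrapper
    json_format.Parse(payload, catalog.ListEntryTypesResponse.pb(resp),
                      ignore_unknown_fields=True)     # mutates resp in place
    print(resp.entry_types[0].name)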
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.ListEntryTypesResponse() + pb_resp = catalog.ListEntryTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_entry_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entry_types_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.ListEntryTypesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListEntryTypes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMetadataJobs(_BaseCatalogServiceRestTransport._BaseListMetadataJobs, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListMetadataJobs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.ListMetadataJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.ListMetadataJobsResponse: + r"""Call the list metadata jobs method over HTTP. + + Args: + request (~.catalog.ListMetadataJobsRequest): + The request object. List metadata jobs request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.ListMetadataJobsResponse: + List metadata jobs response. 
+ """ + + http_options = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_http_options() + + request, metadata = self._interceptor.pre_list_metadata_jobs(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListMetadataJobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListMetadataJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListMetadataJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.ListMetadataJobsResponse() + pb_resp = catalog.ListMetadataJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_metadata_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_metadata_jobs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.ListMetadataJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListMetadataJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _LookupEntry(_BaseCatalogServiceRestTransport._BaseLookupEntry, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.LookupEntry") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.LookupEntryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.Entry: + r"""Call the lookup entry 
method over HTTP. + + Args: + request (~.catalog.LookupEntryRequest): + The request object. Lookup Entry request using + permissions in the source system. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_http_options() + + request, metadata = self._interceptor.pre_lookup_entry(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.LookupEntry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "LookupEntry", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._LookupEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
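Each `__call__` above brackets the HTTP exchange with `pre_*`/`post_*` interceptor hooks, so callers can observe or rewrite requests and responses without subclassing the transport itself. A minimal sketch of wiring a custom interceptor into the REST transport; the class and module paths follow the usual GAPIC layout for this package, so treat the exact names as a sketch rather than a guaranteed surface:

    from google.cloud.dataplex_v1 import CatalogServiceClient
    from google.cloud.dataplex_v1.services.catalog_service.transports.rest import (
        CatalogServiceRestInterceptor,
        CatalogServiceRestTransport,
    )

    class AuditingInterceptor(CatalogServiceRestInterceptor):
        def post_lookup_entry(self, response):
            # Runs after deserialization, just before the Entry reaches the caller.
            print("looked up:", response.name)
            return response

    transport = CatalogServiceRestTransport(interceptor=AuditingInterceptor())
    client = CatalogServiceClient(transport=transport)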
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.Entry() + pb_resp = catalog.Entry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_lookup_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_entry_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.Entry.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "LookupEntry", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SearchEntries(_BaseCatalogServiceRestTransport._BaseSearchEntries, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.SearchEntries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: catalog.SearchEntriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.SearchEntriesResponse: + r"""Call the search entries method over HTTP. + + Args: + request (~.catalog.SearchEntriesRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.catalog.SearchEntriesResponse: + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_http_options() + + request, metadata = self._interceptor.pre_search_entries(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.SearchEntries", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "SearchEntries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._SearchEntries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.SearchEntriesResponse() + pb_resp = catalog.SearchEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_search_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_entries_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.SearchEntriesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.search_entries", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "SearchEntries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateAspectType(_BaseCatalogServiceRestTransport._BaseUpdateAspectType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.UpdateAspectType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.UpdateAspectTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
operations_pb2.Operation: + r"""Call the update aspect type method over HTTP. + + Args: + request (~.catalog.UpdateAspectTypeRequest): + The request object. Update AspectType Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_http_options() + + request, metadata = self._interceptor.pre_update_aspect_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateAspectType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateAspectType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._UpdateAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
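Note that `_UpdateAspectType` hands back the raw `operations_pb2.Operation`; it is the client layer above this transport that wraps it in a `google.api_core.operation.Operation` future, which is what user code polls. A short sketch of the caller's view (the project and aspect type names are placeholders, not values from this diff):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient(transport="rest")
    op = client.update_aspect_type(
        aspect_type=dataplex_v1.AspectType(
            name="projects/p/locations/us-central1/aspectTypes/a",
        ),
    )
    aspect_type = op.result(timeout=300)  # blocks, polling the LRO until done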
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_aspect_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_aspect_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateAspectType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateEntry(_BaseCatalogServiceRestTransport._BaseUpdateEntry, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.UpdateEntry") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.UpdateEntryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> catalog.Entry: + r"""Call the update entry method over HTTP. + + Args: + request (~.catalog.UpdateEntryRequest): + The request object. Update Entry request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.Entry: + An entry is a representation of a + data resource that can be described by + various metadata. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_http_options() + + request, metadata = self._interceptor.pre_update_entry(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntry", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._UpdateEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.Entry() + pb_resp = catalog.Entry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entry_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = catalog.Entry.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntry", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateEntryGroup(_BaseCatalogServiceRestTransport._BaseUpdateEntryGroup, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.UpdateEntryGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.UpdateEntryGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
operations_pb2.Operation: + r"""Call the update entry group method over HTTP. + + Args: + request (~.catalog.UpdateEntryGroupRequest): + The request object. Update EntryGroup Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_http_options() + + request, metadata = self._interceptor.pre_update_entry_group(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntryGroup", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntryGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._UpdateEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
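The `transcoded_request` threaded through these update methods is where the `google.api.http` binding is applied: the PATCH path is expanded from `entry_group.name`, the `entry_group` message becomes the JSON body, and leftover fields such as `update_mask` end up in the query string. A hypothetical transcoding result for UpdateEntryGroup, hand-written to show the shape the `_get_response` helpers consume, not captured from a real call:

    # Illustrative only; the values and exact dict contents are assumed.
    transcoded_request = {
        "method": "patch",
        "uri": "/v1/projects/p/locations/us-central1/entryGroups/g",
        "body": {"description": "updated"},             # serialized request.entry_group
        "query_params": {"updateMask": "description"},  # from request.update_mask
    }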
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_entry_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entry_group_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntryGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateEntryType(_BaseCatalogServiceRestTransport._BaseUpdateEntryType, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.UpdateEntryType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: catalog.UpdateEntryTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update entry type method over HTTP. + + Args: + request (~.catalog.UpdateEntryTypeRequest): + The request object. Update EntryType Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_http_options() + + request, metadata = self._interceptor.pre_update_entry_type(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntryType", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntryType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._UpdateEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_entry_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entry_type_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "UpdateEntryType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def cancel_metadata_job(self) -> Callable[ + [catalog.CancelMetadataJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelMetadataJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_aspect_type(self) -> Callable[ + [catalog.CreateAspectTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAspectType(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entry(self) -> Callable[ + [catalog.CreateEntryRequest], + catalog.Entry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entry_group(self) -> Callable[ + [catalog.CreateEntryGroupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntryGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entry_link(self) -> Callable[ + [catalog.CreateEntryLinkRequest], + catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entry_type(self) -> Callable[ + [catalog.CreateEntryTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntryType(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_metadata_job(self) -> Callable[ + [catalog.CreateMetadataJobRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMetadataJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_aspect_type(self) -> Callable[ + [catalog.DeleteAspectTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAspectType(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entry(self) -> Callable[ + [catalog.DeleteEntryRequest], + catalog.Entry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entry_group(self) -> Callable[ + [catalog.DeleteEntryGroupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntryGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entry_link(self) -> Callable[ + [catalog.DeleteEntryLinkRequest], + catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entry_type(self) -> Callable[ + [catalog.DeleteEntryTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteEntryType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_aspect_type(self) -> Callable[ + [catalog.GetAspectTypeRequest], + catalog.AspectType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAspectType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entry(self) -> Callable[ + [catalog.GetEntryRequest], + catalog.Entry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entry_group(self) -> Callable[ + [catalog.GetEntryGroupRequest], + catalog.EntryGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntryGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entry_link(self) -> Callable[ + [catalog.GetEntryLinkRequest], + catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntryLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entry_type(self) -> Callable[ + [catalog.GetEntryTypeRequest], + catalog.EntryType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntryType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_metadata_job(self) -> Callable[ + [catalog.GetMetadataJobRequest], + catalog.MetadataJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMetadataJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_aspect_types(self) -> Callable[ + [catalog.ListAspectTypesRequest], + catalog.ListAspectTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAspectTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entries(self) -> Callable[ + [catalog.ListEntriesRequest], + catalog.ListEntriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entry_groups(self) -> Callable[ + [catalog.ListEntryGroupsRequest], + catalog.ListEntryGroupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntryGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entry_types(self) -> Callable[ + [catalog.ListEntryTypesRequest], + catalog.ListEntryTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListEntryTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_metadata_jobs(self) -> Callable[ + [catalog.ListMetadataJobsRequest], + catalog.ListMetadataJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMetadataJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup_entry(self) -> Callable[ + [catalog.LookupEntryRequest], + catalog.Entry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LookupEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_entries(self) -> Callable[ + [catalog.SearchEntriesRequest], + catalog.SearchEntriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_aspect_type(self) -> Callable[ + [catalog.UpdateAspectTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAspectType(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entry(self) -> Callable[ + [catalog.UpdateEntryRequest], + catalog.Entry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entry_group(self) -> Callable[ + [catalog.UpdateEntryGroupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntryGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entry_type(self) -> Callable[ + [catalog.UpdateEntryTypeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateEntryType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCatalogServiceRestTransport._BaseGetLocation, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
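All of the `_LOGGER.debug` blocks in this transport are gated twice: by `CLIENT_LOGGING_SUPPORTED` (true when the installed `google-api-core` ships the structured client-logging support) and by the logger's own level, so they cost nothing unless DEBUG logging is enabled. To actually see the request/response records, enable DEBUG on this module's logger or a parent; newer `google-api-core` releases also honor the `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable for the same purpose:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    # Narrow the firehose to just the Dataplex client if desired.
    logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)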
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCatalogServiceRestTransport._BaseListLocations, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseCatalogServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseCatalogServiceRestTransport._BaseCancelOperation, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCatalogServiceRestTransport._BaseDeleteOperation, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCatalogServiceRestTransport._BaseGetOperation, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCatalogServiceRestTransport._BaseListOperations, CatalogServiceRestStub): + def __hash__(self): + return hash("CatalogServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCatalogServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
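+            # For example, a 404 response surfaces as core_exceptions.NotFound and a
+            # 403 as core_exceptions.Forbidden; the mapping from status code to
+            # subclass is provided by api_core's from_http_response helper.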
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CatalogServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py new file mode 100644 index 000000000000..b48f7ba603d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py @@ -0,0 +1,1451 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import catalog +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseCatalogServiceRestTransport(CatalogServiceTransport): + """Base REST backend transport for CatalogService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCancelMetadataJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CancelMetadataJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateAspectType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "aspectTypeId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/aspectTypes', + 'body': 'aspect_type', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateAspectTypeRequest.pb(request) + 
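# path_template.transcode (from google.api_core) matches the request against the + # http_options above and returns a dict with 'method', 'uri', 'query_params' and, + # when the rule declares one, 'body'. For example (illustrative values only), a + # request with parent='projects/p/locations/l' binds the URI + # '/v1/projects/p/locations/l/aspectTypes'. +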
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateAspectType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateEntry: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entries', + 'body': 'entry', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntry._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateEntryGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryGroupId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/entryGroups', + 'body': 'entry_group', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return 
query_params + + class _BaseCreateEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryLinkId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks', + 'body': 'entry_link', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateEntryType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryTypeId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/entryTypes', + 'body': 'entry_type', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateMetadataJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/metadataJobs', + 'body': 'metadata_job', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + pb_request = catalog.CreateMetadataJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteAspectType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/aspectTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteAspectTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEntry: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntry._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEntryGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + 
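# DeleteEntryGroup sends no request body, so the rule below declares only the + # HTTP method and the URI template bound from the request's resource name. +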
http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEntryType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/entryTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAspectType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/aspectTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetAspectTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetAspectType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEntry: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntry._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEntryGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + 
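# GetEntryLink is a plain GET on the entry link's resource name; transcode binds + # the wildcard segments below from the request's name field. +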
http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryLink._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEntryType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/entryTypes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMetadataJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/metadataJobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetMetadataJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAspectTypes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/aspectTypes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.ListAspectTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseListAspectTypes._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEntries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entries', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.ListEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseListEntries._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEntryGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/entryGroups', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.ListEntryGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseListEntryGroups._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEntryTypes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/entryTypes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.ListEntryTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseListEntryTypes._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListMetadataJobs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/metadataJobs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.ListMetadataJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseLookupEntry: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entry" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}:lookupEntry', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.LookupEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseLookupEntry._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSearchEntries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*}:searchEntries', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.SearchEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseSearchEntries._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateAspectType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}', + 'body': 'aspect_type', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.UpdateAspectTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEntry: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}', + 'body': 'entry', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.UpdateEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntry._get_unset_required_fields(query_params)) + + 
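# The $alt system parameter requests JSON responses with enum values encoded as + # integers, mirroring the use_integers_for_enums flag used above. +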
query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEntryGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}', + 'body': 'entry_group', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.UpdateEntryGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEntryType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}', + 'body': 'entry_type', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.UpdateEntryTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def 
__hash__(self): # pragma: NO COVER + raise NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseCatalogServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py new file mode 100644 index 000000000000..adda73051cc6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CmekServiceClient +from .async_client import CmekServiceAsyncClient + +__all__ = ( + 'CmekServiceClient', + 'CmekServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py new file mode 100644 index 000000000000..92bf65140225 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py @@ -0,0 +1,1216 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.cmek_service import pagers +from google.cloud.dataplex_v1.types import cmek +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CmekServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CmekServiceGrpcAsyncIOTransport +from .client import CmekServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class CmekServiceAsyncClient: + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ + + _client: CmekServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
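+ # A non-authoritative note, not generated text: `_DEFAULT_ENDPOINT_TEMPLATE` is + # "dataplex.{UNIVERSE_DOMAIN}", so under the default universe ("googleapis.com") + # it formats to "dataplex.googleapis.com", the same value as DEFAULT_ENDPOINT below.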
+ DEFAULT_ENDPOINT = CmekServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CmekServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CmekServiceClient._DEFAULT_UNIVERSE + + encryption_config_path = staticmethod(CmekServiceClient.encryption_config_path) + parse_encryption_config_path = staticmethod(CmekServiceClient.parse_encryption_config_path) + organization_location_path = staticmethod(CmekServiceClient.organization_location_path) + parse_organization_location_path = staticmethod(CmekServiceClient.parse_organization_location_path) + common_billing_account_path = staticmethod(CmekServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CmekServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CmekServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CmekServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CmekServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(CmekServiceClient.parse_common_organization_path) + common_project_path = staticmethod(CmekServiceClient.common_project_path) + parse_common_project_path = staticmethod(CmekServiceClient.parse_common_project_path) + common_location_path = staticmethod(CmekServiceClient.common_location_path) + parse_common_location_path = staticmethod(CmekServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CmekServiceAsyncClient: The constructed client. + """ + return CmekServiceClient.from_service_account_info.__func__(CmekServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CmekServiceAsyncClient: The constructed client. + """ + return CmekServiceClient.from_service_account_file.__func__(CmekServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CmekServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CmekServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CmekServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = CmekServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CmekServiceTransport, Callable[..., CmekServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the CMEK service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CmekServiceTransport,Callable[..., CmekServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CmekServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CmekServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.CmekServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "credentialsType": None, + } + ) + + async def create_encryption_config(self, + request: Optional[Union[cmek.CreateEncryptionConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + encryption_config: Optional[cmek.EncryptionConfig] = None, + encryption_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEncryptionConfigRequest( + parent="parent_value", + encryption_config_id="encryption_config_id_value", + ) + + # Make the request + operation = client.create_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest, dict]]): + The request object. Create EncryptionConfig Request + parent (:class:`str`): + Required. The location at which the + EncryptionConfig is to be created. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encryption_config (:class:`google.cloud.dataplex_v1.types.EncryptionConfig`): + Required. The EncryptionConfig to + create. + + This corresponds to the ``encryption_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encryption_config_id (:class:`str`): + Required. The ID of the + [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] + to create. Currently, only a value of "default" is + supported. + + This corresponds to the ``encryption_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to + support Customer Managed Encryption Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, encryption_config, encryption_config_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.CreateEncryptionConfigRequest): + request = cmek.CreateEncryptionConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if encryption_config is not None: + request.encryption_config = encryption_config + if encryption_config_id is not None: + request.encryption_config_id = encryption_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cmek.EncryptionConfig, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
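+ # Hedged note (an assumption, not generated text): awaiting the + # AsyncOperation's `result()` coroutine is expected to yield the final + # cmek.EncryptionConfig, with progress metadata surfaced as + # service.OperationMetadata and polling handled via `operations_client`.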
+ return response + + async def update_encryption_config(self, + request: Optional[Union[cmek.UpdateEncryptionConfigRequest, dict]] = None, + *, + encryption_config: Optional[cmek.EncryptionConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEncryptionConfigRequest( + ) + + # Make the request + operation = client.update_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest, dict]]): + The request object. Update EncryptionConfig Request + encryption_config (:class:`google.cloud.dataplex_v1.types.EncryptionConfig`): + Required. The EncryptionConfig to + update. + + This corresponds to the ``encryption_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Mask of fields to update. + The service treats an omitted field mask + as an implied field mask equivalent to + all fields that are populated (have a + non-empty value). + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to + support Customer Managed Encryption Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
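+ # Illustrative note (hypothetical call, not generated text): mixing both + # styles, e.g. update_encryption_config(request=req, update_mask=mask), is + # rejected with a ValueError by the check below, because `request` must + # carry every field itself.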
+ flattened_params = [encryption_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.UpdateEncryptionConfigRequest): + request = cmek.UpdateEncryptionConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_config is not None: + request.encryption_config = encryption_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("encryption_config.name", request.encryption_config.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cmek.EncryptionConfig, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_encryption_config(self, + request: Optional[Union[cmek.DeleteEncryptionConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest, dict]]): + The request object. Delete EncryptionConfig Request + name (:class:`str`): + Required. The name of the + EncryptionConfig to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.DeleteEncryptionConfigRequest): + request = cmek.DeleteEncryptionConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_encryption_configs(self, + request: Optional[Union[cmek.ListEncryptionConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEncryptionConfigsAsyncPager: + r"""List EncryptionConfigs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_encryption_configs(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEncryptionConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_encryption_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest, dict]]): + The request object. List EncryptionConfigs Request + parent (:class:`str`): + Required. The location for which the + EncryptionConfig is to be listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsAsyncPager: + List EncryptionConfigs Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.ListEncryptionConfigsRequest): + request = cmek.ListEncryptionConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_encryption_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEncryptionConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
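+ # Note on the pager (an assumption based on the wrapper above): keeping + # `method`, `request`, and `response` lets `async for` transparently re-issue + # the RPC with each next_page_token until the listing is exhausted.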
+ return response + + async def get_encryption_config(self, + request: Optional[Union[cmek.GetEncryptionConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cmek.EncryptionConfig: + r"""Get an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEncryptionConfigRequest, dict]]): + The request object. Get EncryptionConfig Request + name (:class:`str`): + Required. The name of the + EncryptionConfig to fetch. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EncryptionConfig: + A Resource designed to manage + encryption configurations for customers + to support Customer Managed Encryption + Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.GetEncryptionConfigRequest): + request = cmek.GetEncryptionConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
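+ # Hedged note (not generated text): this guard is expected to compare the + # client's configured universe domain against the credentials' universe and + # fail fast, before any request is sent, if the two disagree.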
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
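+ # Note (not generated text): `_wrapped_methods` carries the per-method + # default retry/timeout configuration, which is how `gapic_v1.method.DEFAULT` + # in the signature resolves to concrete values.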
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CmekServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "CmekServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py new file mode 100644 index 000000000000..71a106c85fda --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py @@ -0,0 +1,1592 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.cmek_service import pagers +from google.cloud.dataplex_v1.types import cmek +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CmekServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CmekServiceGrpcTransport +from .transports.grpc_asyncio import CmekServiceGrpcAsyncIOTransport +from .transports.rest import CmekServiceRestTransport + + +class CmekServiceClientMeta(type): + """Metaclass for the CmekService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CmekServiceTransport]] + _transport_registry["grpc"] = CmekServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CmekServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CmekServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CmekServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
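+ # Because `_transport_registry` is an OrderedDict populated above in the + # order grpc -> grpc_asyncio -> rest, the default returned here is gRPC.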
+ return next(iter(cls._transport_registry.values())) + + +class CmekServiceClient(metaclass=CmekServiceClientMeta): + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CmekServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CmekServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CmekServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CmekServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def encryption_config_path(organization: str,location: str,encryption_config: str,) -> str: + """Returns a fully-qualified encryption_config string.""" + return "organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config}".format(organization=organization, location=location, encryption_config=encryption_config, ) + + @staticmethod + def parse_encryption_config_path(path: str) -> Dict[str,str]: + """Parses a encryption_config path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/encryptionConfigs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def organization_location_path(organization: str,location: str,) -> str: + """Returns a fully-qualified organization_location string.""" + return "organizations/{organization}/locations/{location}".format(organization=organization, location=location, ) + + @staticmethod + def parse_organization_location_path(path: str) -> Dict[str,str]: + """Parses a organization_location path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        else use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
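+
+        Example (illustrative sketch only; the environment values below are
+        hypothetical):
+
+        .. code-block:: python
+
+            import os
+
+            os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
+            os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
+            # With GOOGLE_CLOUD_UNIVERSE_DOMAIN unset, this returns
+            # (True, "auto", None).
+            flags = CmekServiceClient._read_environment_variables()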
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CmekServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = CmekServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = CmekServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CmekServiceTransport, Callable[..., CmekServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cmek service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CmekServiceTransport,Callable[..., CmekServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CmekServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CmekServiceClient._read_environment_variables()
+        self._client_cert_source = CmekServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = CmekServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, CmekServiceTransport)
+        if transport_provided:
+            # transport is a CmekServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(CmekServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CmekServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[CmekServiceTransport], Callable[..., CmekServiceTransport]] = ( + CmekServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CmekServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.CmekServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "credentialsType": None, + } + ) + + def create_encryption_config(self, + request: Optional[Union[cmek.CreateEncryptionConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + encryption_config: Optional[cmek.EncryptionConfig] = None, + encryption_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEncryptionConfigRequest( + parent="parent_value", + encryption_config_id="encryption_config_id_value", + ) + + # Make the request + operation = client.create_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest, dict]): + The request object. Create EncryptionConfig Request + parent (str): + Required. The location at which the + EncryptionConfig is to be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): + Required. The EncryptionConfig to + create. + + This corresponds to the ``encryption_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encryption_config_id (str): + Required. The ID of the + [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] + to create. Currently, only a value of "default" is + supported. + + This corresponds to the ``encryption_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to + support Customer Managed Encryption Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, encryption_config, encryption_config_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.CreateEncryptionConfigRequest): + request = cmek.CreateEncryptionConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if encryption_config is not None: + request.encryption_config = encryption_config + if encryption_config_id is not None: + request.encryption_config_id = encryption_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cmek.EncryptionConfig, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_encryption_config(self, + request: Optional[Union[cmek.UpdateEncryptionConfigRequest, dict]] = None, + *, + encryption_config: Optional[cmek.EncryptionConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEncryptionConfigRequest( + ) + + # Make the request + operation = client.update_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest, dict]): + The request object. Update EncryptionConfig Request + encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): + Required. The EncryptionConfig to + update. + + This corresponds to the ``encryption_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. + The service treats an omitted field mask + as an implied field mask equivalent to + all fields that are populated (have a + non-empty value). + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to + support Customer Managed Encryption Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [encryption_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.UpdateEncryptionConfigRequest): + request = cmek.UpdateEncryptionConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if encryption_config is not None: + request.encryption_config = encryption_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("encryption_config.name", request.encryption_config.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cmek.EncryptionConfig, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_encryption_config(self, + request: Optional[Union[cmek.DeleteEncryptionConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest, dict]): + The request object. Delete EncryptionConfig Request + name (str): + Required. The name of the + EncryptionConfig to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.DeleteEncryptionConfigRequest): + request = cmek.DeleteEncryptionConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
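+        # For delete, the future's result type is ``google.protobuf.Empty``;
+        # interim progress is surfaced through ``OperationMetadata``.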
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_encryption_configs(self, + request: Optional[Union[cmek.ListEncryptionConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEncryptionConfigsPager: + r"""List EncryptionConfigs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_encryption_configs(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEncryptionConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_encryption_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest, dict]): + The request object. List EncryptionConfigs Request + parent (str): + Required. The location for which the + EncryptionConfig is to be listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsPager: + List EncryptionConfigs Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cmek.ListEncryptionConfigsRequest): + request = cmek.ListEncryptionConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_encryption_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEncryptionConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_encryption_config(self, + request: Optional[Union[cmek.GetEncryptionConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cmek.EncryptionConfig: + r"""Get an EncryptionConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEncryptionConfigRequest, dict]): + The request object. Get EncryptionConfig Request + name (str): + Required. The name of the + EncryptionConfig to fetch. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EncryptionConfig: + A Resource designed to manage + encryption configurations for customers + to support Customer Managed Encryption + Keys (CMEK). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cmek.GetEncryptionConfigRequest): + request = cmek.GetEncryptionConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_encryption_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CmekServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
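+        # For example, a hypothetical dict {"name": "projects/p/locations/l"}
+        # becomes GetLocationRequest(name="projects/p/locations/l").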
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "CmekServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py new file mode 100644 index 000000000000..093f10c95405 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import cmek + + +class ListEncryptionConfigsPager: + """A pager for iterating through ``list_encryption_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``encryption_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEncryptionConfigs`` requests and continue to iterate + through the ``encryption_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cmek.ListEncryptionConfigsResponse], + request: cmek.ListEncryptionConfigsRequest, + response: cmek.ListEncryptionConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest): + The initial request object. 
+ response (google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = cmek.ListEncryptionConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cmek.ListEncryptionConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cmek.EncryptionConfig]: + for page in self.pages: + yield from page.encryption_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEncryptionConfigsAsyncPager: + """A pager for iterating through ``list_encryption_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``encryption_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEncryptionConfigs`` requests and continue to iterate + through the ``encryption_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cmek.ListEncryptionConfigsResponse]], + request: cmek.ListEncryptionConfigsRequest, + response: cmek.ListEncryptionConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
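+
+        Example (illustrative sketch only; in practice the pager is returned
+        by the async client's ``list_encryption_configs`` method rather than
+        constructed directly):
+
+        .. code-block:: python
+
+            async for encryption_config in pager:
+                print(encryption_config.name)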
+ """ + self._method = method + self._request = cmek.ListEncryptionConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cmek.ListEncryptionConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[cmek.EncryptionConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.encryption_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst new file mode 100644 index 000000000000..911db2245cc9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CmekServiceTransport` is the ABC for all transports. +- public child `CmekServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CmekServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCmekServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `CmekServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py new file mode 100644 index 000000000000..66f1072837c6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CmekServiceTransport +from .grpc import CmekServiceGrpcTransport +from .grpc_asyncio import CmekServiceGrpcAsyncIOTransport +from .rest import CmekServiceRestTransport +from .rest import CmekServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CmekServiceTransport]] +_transport_registry['grpc'] = CmekServiceGrpcTransport +_transport_registry['grpc_asyncio'] = CmekServiceGrpcAsyncIOTransport +_transport_registry['rest'] = CmekServiceRestTransport + +__all__ = ( + 'CmekServiceTransport', + 'CmekServiceGrpcTransport', + 'CmekServiceGrpcAsyncIOTransport', + 'CmekServiceRestTransport', + 'CmekServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py new file mode 100644 index 000000000000..26a858fe4793 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import cmek +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class CmekServiceTransport(abc.ABC): + """Abstract transport class for CmekService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply the audience if a credentials file was passed by the user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods.
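+        # Illustrative note: `gapic_v1.method.wrap_method` wraps each raw
+        # transport callable with default retry/timeout handling and attaches
+        # the `client_info` user-agent metadata. A hedged sketch of a single
+        # wrapped call (parameter values here are examples, not defaults):
+        #
+        #   wrapped = gapic_v1.method.wrap_method(
+        #       self.list_encryption_configs,
+        #       default_timeout=None,     # no client-side default deadline
+        #       client_info=client_info,  # adds x-goog-api-client metadata
+        #   )
+        #   response = wrapped(request, timeout=30.0)  # per-call override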
+ self._wrapped_methods = { + self.create_encryption_config: gapic_v1.method.wrap_method( + self.create_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.update_encryption_config: gapic_v1.method.wrap_method( + self.update_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_encryption_config: gapic_v1.method.wrap_method( + self.delete_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.list_encryption_configs: gapic_v1.method.wrap_method( + self.list_encryption_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_encryption_config: gapic_v1.method.wrap_method( + self.get_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_encryption_config(self) -> Callable[ + [cmek.CreateEncryptionConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_encryption_config(self) -> Callable[ + [cmek.UpdateEncryptionConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_encryption_config(self) -> Callable[ + [cmek.DeleteEncryptionConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_encryption_configs(self) -> Callable[ + [cmek.ListEncryptionConfigsRequest], + Union[ + cmek.ListEncryptionConfigsResponse, + Awaitable[cmek.ListEncryptionConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_encryption_config(self) -> Callable[ + [cmek.GetEncryptionConfigRequest], + Union[ + cmek.EncryptionConfig, + Awaitable[cmek.EncryptionConfig] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + 
[operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CmekServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py new file mode 100644 index 000000000000..21a9803af183 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import cmek +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert the gRPC response metadata to a dict of strings + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class CmekServiceGrpcTransport(CmekServiceTransport): + """gRPC backend transport for CmekService.
+ + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_encryption_config(self) -> Callable[ + [cmek.CreateEncryptionConfigRequest], + operations_pb2.Operation]: + r"""Return a callable for the create encryption config method over gRPC. + + Create an EncryptionConfig. + + Returns: + Callable[[~.CreateEncryptionConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_encryption_config' not in self._stubs: + self._stubs['create_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/CreateEncryptionConfig', + request_serializer=cmek.CreateEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_encryption_config'] + + @property + def update_encryption_config(self) -> Callable[ + [cmek.UpdateEncryptionConfigRequest], + operations_pb2.Operation]: + r"""Return a callable for the update encryption config method over gRPC. + + Update an EncryptionConfig. + + Returns: + Callable[[~.UpdateEncryptionConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
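+        # Illustrative note: every stub property in this class uses the same
+        # lazy-caching pattern, sketched below in isolation so each gRPC stub
+        # is created at most once per transport ('method_name' is a
+        # placeholder, not a real RPC):
+        #
+        #   if 'method_name' not in self._stubs:
+        #       self._stubs['method_name'] = self._logged_channel.unary_unary(...)
+        #   return self._stubs['method_name']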
+ if 'update_encryption_config' not in self._stubs: + self._stubs['update_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/UpdateEncryptionConfig', + request_serializer=cmek.UpdateEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_encryption_config'] + + @property + def delete_encryption_config(self) -> Callable[ + [cmek.DeleteEncryptionConfigRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete encryption config method over gRPC. + + Delete an EncryptionConfig. + + Returns: + Callable[[~.DeleteEncryptionConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_encryption_config' not in self._stubs: + self._stubs['delete_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/DeleteEncryptionConfig', + request_serializer=cmek.DeleteEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_encryption_config'] + + @property + def list_encryption_configs(self) -> Callable[ + [cmek.ListEncryptionConfigsRequest], + cmek.ListEncryptionConfigsResponse]: + r"""Return a callable for the list encryption configs method over gRPC. + + List EncryptionConfigs. + + Returns: + Callable[[~.ListEncryptionConfigsRequest], + ~.ListEncryptionConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_encryption_configs' not in self._stubs: + self._stubs['list_encryption_configs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/ListEncryptionConfigs', + request_serializer=cmek.ListEncryptionConfigsRequest.serialize, + response_deserializer=cmek.ListEncryptionConfigsResponse.deserialize, + ) + return self._stubs['list_encryption_configs'] + + @property + def get_encryption_config(self) -> Callable[ + [cmek.GetEncryptionConfigRequest], + cmek.EncryptionConfig]: + r"""Return a callable for the get encryption config method over gRPC. + + Get an EncryptionConfig. + + Returns: + Callable[[~.GetEncryptionConfigRequest], + ~.EncryptionConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_encryption_config' not in self._stubs: + self._stubs['get_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/GetEncryptionConfig', + request_serializer=cmek.GetEncryptionConfigRequest.serialize, + response_deserializer=cmek.EncryptionConfig.deserialize, + ) + return self._stubs['get_encryption_config'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'CmekServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..47e4dc2aadaf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py @@ -0,0 +1,652 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataplex_v1.types import cmek +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import CmekServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert the gRPC response metadata to a dict of strings + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class CmekServiceGrpcAsyncIOTransport(CmekServiceTransport):
+ """gRPC AsyncIO backend transport for CmekService. + + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
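+                # Illustrative note: a hedged sketch of configuring mutual TLS
+                # via the non-deprecated path using client_cert_source_for_mtls
+                # (the cert/key file names below are assumptions):
+                #
+                #   def my_cert_source() -> Tuple[bytes, bytes]:
+                #       with open("client.pem", "rb") as c, open("client.key", "rb") as k:
+                #           return c.read(), k.read()
+                #
+                #   transport = CmekServiceGrpcAsyncIOTransport(
+                #       client_cert_source_for_mtls=my_cert_source,
+                #   )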
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_encryption_config(self) -> Callable[ + [cmek.CreateEncryptionConfigRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create encryption config method over gRPC. + + Create an EncryptionConfig. + + Returns: + Callable[[~.CreateEncryptionConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
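+        # Illustrative note: this callable returns a raw
+        # google.longrunning.Operation rather than the final EncryptionConfig.
+        # A hedged sketch of the flow (request construction is an assumption):
+        #
+        #   operation = await transport.create_encryption_config(request)
+        #   # operation.name can then be polled via the get_operation mixin
+        #   # below until operation.done is set, at which point the response
+        #   # unpacks to an EncryptionConfig.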
+ if 'create_encryption_config' not in self._stubs: + self._stubs['create_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/CreateEncryptionConfig', + request_serializer=cmek.CreateEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_encryption_config'] + + @property + def update_encryption_config(self) -> Callable[ + [cmek.UpdateEncryptionConfigRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update encryption config method over gRPC. + + Update an EncryptionConfig. + + Returns: + Callable[[~.UpdateEncryptionConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_encryption_config' not in self._stubs: + self._stubs['update_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/UpdateEncryptionConfig', + request_serializer=cmek.UpdateEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_encryption_config'] + + @property + def delete_encryption_config(self) -> Callable[ + [cmek.DeleteEncryptionConfigRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete encryption config method over gRPC. + + Delete an EncryptionConfig. + + Returns: + Callable[[~.DeleteEncryptionConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_encryption_config' not in self._stubs: + self._stubs['delete_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/DeleteEncryptionConfig', + request_serializer=cmek.DeleteEncryptionConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_encryption_config'] + + @property + def list_encryption_configs(self) -> Callable[ + [cmek.ListEncryptionConfigsRequest], + Awaitable[cmek.ListEncryptionConfigsResponse]]: + r"""Return a callable for the list encryption configs method over gRPC. + + List EncryptionConfigs. + + Returns: + Callable[[~.ListEncryptionConfigsRequest], + Awaitable[~.ListEncryptionConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
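+        # Illustrative note: a hedged sketch of paging through results with
+        # this callable directly (the async pager generated alongside this
+        # transport normally drives this loop for you; the parent value is a
+        # placeholder):
+        #
+        #   request = cmek.ListEncryptionConfigsRequest(parent=..., page_size=50)
+        #   while True:
+        #       response = await transport.list_encryption_configs(request)
+        #       for config in response.encryption_configs:
+        #           print(config.name)
+        #       if not response.next_page_token:
+        #           break
+        #       request.page_token = response.next_page_token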
+ if 'list_encryption_configs' not in self._stubs: + self._stubs['list_encryption_configs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/ListEncryptionConfigs', + request_serializer=cmek.ListEncryptionConfigsRequest.serialize, + response_deserializer=cmek.ListEncryptionConfigsResponse.deserialize, + ) + return self._stubs['list_encryption_configs'] + + @property + def get_encryption_config(self) -> Callable[ + [cmek.GetEncryptionConfigRequest], + Awaitable[cmek.EncryptionConfig]]: + r"""Return a callable for the get encryption config method over gRPC. + + Get an EncryptionConfig. + + Returns: + Callable[[~.GetEncryptionConfigRequest], + Awaitable[~.EncryptionConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_encryption_config' not in self._stubs: + self._stubs['get_encryption_config'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.CmekService/GetEncryptionConfig', + request_serializer=cmek.GetEncryptionConfigRequest.serialize, + response_deserializer=cmek.EncryptionConfig.deserialize, + ) + return self._stubs['get_encryption_config'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_encryption_config: self._wrap_method( + self.create_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.update_encryption_config: self._wrap_method( + self.update_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_encryption_config: self._wrap_method( + self.delete_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.list_encryption_configs: self._wrap_method( + self.list_encryption_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_encryption_config: self._wrap_method( + self.get_encryption_config, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'CmekServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py new file mode 100644 index 000000000000..ecfbd2a2a163 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py @@ -0,0 +1,1907 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import cmek +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseCmekServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class 
CmekServiceRestInterceptor: + """Interceptor for CmekService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CmekServiceRestTransport. + + .. code-block:: python + class MyCustomCmekServiceInterceptor(CmekServiceRestInterceptor): + def pre_create_encryption_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_encryption_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_encryption_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_encryption_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_encryption_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_encryption_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_encryption_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_encryption_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_encryption_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_encryption_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CmekServiceRestTransport(interceptor=MyCustomCmekServiceInterceptor()) + client = CmekServiceClient(transport=transport) + + + """ + def pre_create_encryption_config(self, request: cmek.CreateEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.CreateEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_encryption_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_create_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_encryption_config + + DEPRECATED. Please use the `post_create_encryption_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. This `post_create_encryption_config` interceptor runs + before the `post_create_encryption_config_with_metadata` interceptor. + """ + return response + + def post_create_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_encryption_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CmekService server but before it is returned to user code. 
+ + We recommend only using this `post_create_encryption_config_with_metadata` + interceptor in new development instead of the `post_create_encryption_config` interceptor. + When both interceptors are used, this `post_create_encryption_config_with_metadata` interceptor runs after the + `post_create_encryption_config` interceptor. The (possibly modified) response returned by + `post_create_encryption_config` will be passed to + `post_create_encryption_config_with_metadata`. + """ + return response, metadata + + def pre_delete_encryption_config(self, request: cmek.DeleteEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.DeleteEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_encryption_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_delete_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_encryption_config + + DEPRECATED. Please use the `post_delete_encryption_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. This `post_delete_encryption_config` interceptor runs + before the `post_delete_encryption_config_with_metadata` interceptor. + """ + return response + + def post_delete_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_encryption_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CmekService server but before it is returned to user code. + + We recommend only using this `post_delete_encryption_config_with_metadata` + interceptor in new development instead of the `post_delete_encryption_config` interceptor. + When both interceptors are used, this `post_delete_encryption_config_with_metadata` interceptor runs after the + `post_delete_encryption_config` interceptor. The (possibly modified) response returned by + `post_delete_encryption_config` will be passed to + `post_delete_encryption_config_with_metadata`. + """ + return response, metadata + + def pre_get_encryption_config(self, request: cmek.GetEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.GetEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_encryption_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_get_encryption_config(self, response: cmek.EncryptionConfig) -> cmek.EncryptionConfig: + """Post-rpc interceptor for get_encryption_config + + DEPRECATED. Please use the `post_get_encryption_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. This `post_get_encryption_config` interceptor runs + before the `post_get_encryption_config_with_metadata` interceptor. 
+ """ + return response + + def post_get_encryption_config_with_metadata(self, response: cmek.EncryptionConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.EncryptionConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_encryption_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CmekService server but before it is returned to user code. + + We recommend only using this `post_get_encryption_config_with_metadata` + interceptor in new development instead of the `post_get_encryption_config` interceptor. + When both interceptors are used, this `post_get_encryption_config_with_metadata` interceptor runs after the + `post_get_encryption_config` interceptor. The (possibly modified) response returned by + `post_get_encryption_config` will be passed to + `post_get_encryption_config_with_metadata`. + """ + return response, metadata + + def pre_list_encryption_configs(self, request: cmek.ListEncryptionConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.ListEncryptionConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_encryption_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_list_encryption_configs(self, response: cmek.ListEncryptionConfigsResponse) -> cmek.ListEncryptionConfigsResponse: + """Post-rpc interceptor for list_encryption_configs + + DEPRECATED. Please use the `post_list_encryption_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. This `post_list_encryption_configs` interceptor runs + before the `post_list_encryption_configs_with_metadata` interceptor. + """ + return response + + def post_list_encryption_configs_with_metadata(self, response: cmek.ListEncryptionConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.ListEncryptionConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_encryption_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CmekService server but before it is returned to user code. + + We recommend only using this `post_list_encryption_configs_with_metadata` + interceptor in new development instead of the `post_list_encryption_configs` interceptor. + When both interceptors are used, this `post_list_encryption_configs_with_metadata` interceptor runs after the + `post_list_encryption_configs` interceptor. The (possibly modified) response returned by + `post_list_encryption_configs` will be passed to + `post_list_encryption_configs_with_metadata`. + """ + return response, metadata + + def pre_update_encryption_config(self, request: cmek.UpdateEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.UpdateEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_encryption_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_update_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_encryption_config + + DEPRECATED. 
Please use the `post_update_encryption_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. This `post_update_encryption_config` interceptor runs + before the `post_update_encryption_config_with_metadata` interceptor. + """ + return response + + def post_update_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_encryption_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CmekService server but before it is returned to user code. + + We recommend only using this `post_update_encryption_config_with_metadata` + interceptor in new development instead of the `post_update_encryption_config` interceptor. + When both interceptors are used, this `post_update_encryption_config_with_metadata` interceptor runs after the + `post_update_encryption_config` interceptor. The (possibly modified) response returned by + `post_update_encryption_config` will be passed to + `post_update_encryption_config_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CmekService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CmekService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CmekServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CmekServiceRestInterceptor + + +class CmekServiceRestTransport(_BaseCmekServiceRestTransport): + """REST backend synchronous transport for CmekService. + + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CmekServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CmekServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
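+        # (Illustrative sketch, not generated code: callers usually reach this
+        # client indirectly while polling a long-running operation, e.g. a
+        # hypothetical
+        #
+        #     op = transport.operations_client.get_operation(
+        #         name="projects/p/locations/l/operations/o",  # placeholder name
+        #     )
+        #
+        # where `transport` is an already-constructed CmekServiceRestTransport.)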
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateEncryptionConfig(_BaseCmekServiceRestTransport._BaseCreateEncryptionConfig, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.CreateEncryptionConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: cmek.CreateEncryptionConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create encryption config method over HTTP. + + Args: + request (~.cmek.CreateEncryptionConfigRequest): + The request object. Create EncryptionConfig Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_http_options() + + request, metadata = self._interceptor.pre_create_encryption_config(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_transcoded_request(http_options, request) + + body = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.CreateEncryptionConfig", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "CreateEncryptionConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._CreateEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_encryption_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_encryption_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceClient.create_encryption_config", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "CreateEncryptionConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEncryptionConfig(_BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.DeleteEncryptionConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cmek.DeleteEncryptionConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete encryption config method over HTTP. + + Args: + request (~.cmek.DeleteEncryptionConfigRequest): + The request object. Delete EncryptionConfig Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_http_options() + + request, metadata = self._interceptor.pre_delete_encryption_config(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.DeleteEncryptionConfig", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "DeleteEncryptionConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._DeleteEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
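+            # (Illustrative note, not generated code: `from_http_response`
+            # below maps the HTTP status to the matching exception class, so a
+            # hypothetical caller could write
+            #
+            #     try:
+            #         client.delete_encryption_config(name=...)  # placeholder args
+            #     except core_exceptions.NotFound:
+            #         ...  # a 404 from the service surfaces here
+            # )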
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_encryption_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_encryption_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceClient.delete_encryption_config", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "DeleteEncryptionConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEncryptionConfig(_BaseCmekServiceRestTransport._BaseGetEncryptionConfig, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.GetEncryptionConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cmek.GetEncryptionConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cmek.EncryptionConfig: + r"""Call the get encryption config method over HTTP. + + Args: + request (~.cmek.GetEncryptionConfigRequest): + The request object. Get EncryptionConfig Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cmek.EncryptionConfig: + A Resource designed to manage + encryption configurations for customers + to support Customer Managed Encryption + Keys (CMEK). 
+ + """ + + http_options = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_http_options() + + request, metadata = self._interceptor.pre_get_encryption_config(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetEncryptionConfig", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetEncryptionConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._GetEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cmek.EncryptionConfig() + pb_resp = cmek.EncryptionConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_encryption_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_encryption_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cmek.EncryptionConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceClient.get_encryption_config", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetEncryptionConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEncryptionConfigs(_BaseCmekServiceRestTransport._BaseListEncryptionConfigs, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.ListEncryptionConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cmek.ListEncryptionConfigsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cmek.ListEncryptionConfigsResponse: 
+ r"""Call the list encryption configs method over HTTP. + + Args: + request (~.cmek.ListEncryptionConfigsRequest): + The request object. List EncryptionConfigs Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cmek.ListEncryptionConfigsResponse: + List EncryptionConfigs Response + """ + + http_options = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_http_options() + + request, metadata = self._interceptor.pre_list_encryption_configs(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListEncryptionConfigs", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "ListEncryptionConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._ListEncryptionConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = cmek.ListEncryptionConfigsResponse()
+            pb_resp = cmek.ListEncryptionConfigsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_encryption_configs(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_encryption_configs_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = cmek.ListEncryptionConfigsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.CmekServiceClient.list_encryption_configs",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.CmekService",
+                        "rpcName": "ListEncryptionConfigs",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _UpdateEncryptionConfig(_BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig, CmekServiceRestStub):
+        def __hash__(self):
+            return hash("CmekServiceRestTransport.UpdateEncryptionConfig")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: cmek.UpdateEncryptionConfigRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the update encryption config method over HTTP.
+
+            Args:
+                request (~.cmek.UpdateEncryptionConfigRequest):
+                    The request object. Update EncryptionConfig Request
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                long-running operation that is the
+                result of a network API call.
+ + """ + + http_options = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_http_options() + + request, metadata = self._interceptor.pre_update_encryption_config(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_transcoded_request(http_options, request) + + body = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.UpdateEncryptionConfig", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "UpdateEncryptionConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._UpdateEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_encryption_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_encryption_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceClient.update_encryption_config", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "UpdateEncryptionConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_encryption_config(self) -> Callable[ + [cmek.CreateEncryptionConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_encryption_config(self) -> Callable[ + [cmek.DeleteEncryptionConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_encryption_config(self) -> Callable[ + [cmek.GetEncryptionConfigRequest], + cmek.EncryptionConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_encryption_configs(self) -> Callable[ + [cmek.ListEncryptionConfigsRequest], + cmek.ListEncryptionConfigsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEncryptionConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_encryption_config(self) -> Callable[ + [cmek.UpdateEncryptionConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCmekServiceRestTransport._BaseGetLocation, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseCmekServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCmekServiceRestTransport._BaseListLocations, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseCmekServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
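+            # (Illustrative note, not generated code: ListLocations is a shared
+            # location mixin; a hypothetical direct call looks like
+            #
+            #     client.list_locations(
+            #         locations_pb2.ListLocationsRequest(name="projects/my-project"),
+            #     )
+            #
+            # where "projects/my-project" is a placeholder resource name.)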
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = locations_pb2.ListLocationsResponse()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_list_locations(resp)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.CmekServiceClient.ListLocations",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.CmekService",
+                        "rpcName": "ListLocations",
+                        "httpResponse": http_response,
+                        "metadata": http_response["headers"],
+                    },
+                )
+            return resp
+
+    @property
+    def cancel_operation(self):
+        return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _CancelOperation(_BaseCmekServiceRestTransport._BaseCancelOperation, CmekServiceRestStub):
+        def __hash__(self):
+            return hash("CmekServiceRestTransport.CancelOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: operations_pb2.CancelOperationRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> None:
+
+            r"""Call the cancel operation method over HTTP.
+
+            Args:
+                request (operations_pb2.CancelOperationRequest):
+                    The request object for CancelOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+ """ + + http_options = _BaseCmekServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseCmekServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCmekServiceRestTransport._BaseDeleteOperation, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCmekServiceRestTransport._BaseGetOperation, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseCmekServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCmekServiceRestTransport._BaseListOperations, CmekServiceRestStub): + def __hash__(self): + return hash("CmekServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCmekServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCmekServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCmekServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CmekServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
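One more aside before the status check below, since the `CLIENT_LOGGING_SUPPORTED` gate has now appeared in every mixin method: these DEBUG records only fire when `google.api_core.client_logging` is importable and the module logger is DEBUG-enabled. A minimal sketch of turning them on with the standard library follows; the handler and format choices are assumptions, not requirements.

```python
# Minimal sketch: satisfy the _LOGGER.isEnabledFor(logging.DEBUG) gate
# used throughout this transport. The structured fields ("serviceName",
# "rpcName", ...) travel in `extra`, so a plain formatter shows only
# the message unless you surface them yourself.
import logging

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(message)s"))

dataplex_logger = logging.getLogger("google.cloud.dataplex_v1")
dataplex_logger.setLevel(logging.DEBUG)
dataplex_logger.addHandler(handler)
```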
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.CmekService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CmekServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py new file mode 100644 index 000000000000..b986fd0e4523 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py @@ -0,0 +1,472 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import cmek +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseCmekServiceRestTransport(CmekServiceTransport): + """Base REST backend transport for CmekService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateEncryptionConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "encryptionConfigId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=organizations/*/locations/*}/encryptionConfigs', + 'body': 'encryption_config', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cmek.CreateEncryptionConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEncryptionConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/encryptionConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
cmek.DeleteEncryptionConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEncryptionConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/encryptionConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cmek.GetEncryptionConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEncryptionConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=organizations/*/locations/*}/encryptionConfigs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cmek.ListEncryptionConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEncryptionConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{encryption_config.name=organizations/*/locations/*/encryptionConfigs/*}', + 'body': 'encryption_config', + }, + ] + 
return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cmek.UpdateEncryptionConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseCmekServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py new file mode 100644 index 000000000000..ba661ddf8d6b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
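A brief aside before the ContentService files continue: the `_Base*` helper classes above all funnel through `path_template.transcode`, a real `google.api_core` helper that matches a request against each HTTP rule and splits it into method, URI, body, and leftover query params. The resource name below is a made-up placeholder.

```python
# Illustrative input/output for the transcoding helper used by the
# _Base* classes above; values are placeholders.
from google.api_core import path_template

http_options = [{
    'method': 'get',
    'uri': '/v1/{name=organizations/*/locations/*/encryptionConfigs/*}',
}]
transcoded = path_template.transcode(
    http_options,
    name='organizations/123/locations/us-central1/encryptionConfigs/my-config',
)
# transcoded['method'] -> 'get'
# transcoded['uri']    -> '/v1/organizations/123/locations/us-central1/encryptionConfigs/my-config'
# transcoded['query_params'] -> {} (fields not bound in the URI would land here)
```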
+# +from .client import ContentServiceClient +from .async_client import ContentServiceAsyncClient + +__all__ = ( + 'ContentServiceClient', + 'ContentServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py new file mode 100644 index 000000000000..22b3c5cd4ccb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -0,0 +1,1497 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport +from .client import ContentServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class ContentServiceAsyncClient: + """ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + """ + + _client: ContentServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
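The block that follows re-exports the synchronous client's helpers, including the resource-path builders. A hedged usage sketch (all identifiers are placeholders); these helpers only format and parse strings, so they run locally with no network.

```python
# Placeholders throughout; path helpers only format/parse resource names.
from google.cloud import dataplex_v1

parent = dataplex_v1.ContentServiceAsyncClient.lake_path(
    "my-project", "us-central1", "my-lake")
# -> "projects/my-project/locations/us-central1/lakes/my-lake"

parsed = dataplex_v1.ContentServiceAsyncClient.parse_lake_path(parent)
# -> {"project": "my-project", "location": "us-central1", "lake": "my-lake"}
```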
+ DEFAULT_ENDPOINT = ContentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ContentServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ContentServiceClient._DEFAULT_UNIVERSE + + content_path = staticmethod(ContentServiceClient.content_path) + parse_content_path = staticmethod(ContentServiceClient.parse_content_path) + lake_path = staticmethod(ContentServiceClient.lake_path) + parse_lake_path = staticmethod(ContentServiceClient.parse_lake_path) + common_billing_account_path = staticmethod(ContentServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ContentServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(ContentServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ContentServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ContentServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(ContentServiceClient.parse_common_organization_path) + common_project_path = staticmethod(ContentServiceClient.common_project_path) + parse_common_project_path = staticmethod(ContentServiceClient.parse_common_project_path) + common_location_path = staticmethod(ContentServiceClient.common_location_path) + parse_common_location_path = staticmethod(ContentServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceAsyncClient: The constructed client. + """ + return ContentServiceClient.from_service_account_info.__func__(ContentServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceAsyncClient: The constructed client. + """ + return ContentServiceClient.from_service_account_file.__func__(ContentServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ContentServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ContentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ContentServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ContentServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the content service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ContentServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ContentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.ContentServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "credentialsType": None, + } + ) + + async def create_content(self, + request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[analyze.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Create a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = await client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]]): + The request object. Create content request. + parent (:class:`str`): + Required. 
The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, content] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_content(self, + request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, + *, + content: Optional[analyze.Content] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = await client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]]): + The request object. Update content request. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [content, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("content.name", request.content.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_content(self, + request: Optional[Union[content.DeleteContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_content(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]]): + The request object. Delete content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
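An aside before the request is sent below: each method above pins routing metadata via `gapic_v1.routing_header.to_grpc_metadata`, a real `google.api_core` helper that URL-encodes field/value pairs into the single `x-goog-request-params` header so the backend can route the request. The resource name in this sketch is a placeholder.

```python
# Placeholder name; the helper only builds the header tuple.
from google.api_core import gapic_v1

name = "projects/p/locations/us-central1/lakes/l/content/c"
header = gapic_v1.routing_header.to_grpc_metadata((("name", name),))
# header -> ("x-goog-request-params",
#            "name=projects%2Fp%2Flocations%2Fus-central1%2Flakes%2Fl%2Fcontent%2Fc")
```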
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_content(self, + request: Optional[Union[content.GetContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]]): + The request object. Get content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+                **JSON example:**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - The request isn't a proto-plus wrapped type,
+        #   so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+        elif not request:
+            request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def set_iam_policy(self,
+            request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> policy_pb2.Policy:
+        r"""Sets the access control policy on the specified contentitem
+        resource. Replaces any existing policy.
+
+        Caller must have Google IAM ``dataplex.content.setIamPolicy``
+        permission on the resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_set_iam_policy():
+                # Create a client
+                client = dataplex_v1.ContentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - The request isn't a proto-plus wrapped type,
+        #   so it must be constructed via keyword expansion.
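+        # Unlike the proto-plus request types elsewhere in this client,
+        # iam_policy_pb2 messages are raw protobufs, so a dict is expanded
+        # into keyword arguments instead of being handed to the constructor.
+        # A hedged illustration of the two accepted shapes (resource name
+        # illustrative):
+        #
+        #     request = iam_policy_pb2.SetIamPolicyRequest(resource="projects/p/locations/l/lakes/lake/content/c")
+        #     # or, equivalently:
+        #     request = {"resource": "projects/p/locations/l/lakes/lake/content/c"}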
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
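+        # A hedged sketch of a typical permission probe; the permission name
+        # is assumed for illustration only, consult the Dataplex IAM
+        # documentation for the authoritative strings.
+        #
+        #     resp = await client.test_iam_permissions(request={
+        #         "resource": content_name,
+        #         "permissions": ["dataplex.content.get"],
+        #     })
+        #     can_read = "dataplex.content.get" in resp.permissions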
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_content(self, + request: Optional[Union[content.ListContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListContentAsyncPager: + r"""List content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]]): + The request object. List content request. Returns the + BASIC Content view. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: + List content response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
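+        # The check below enforces mutual exclusion between `request` and the
+        # flattened `parent` argument; a hedged sketch of a call it rejects
+        # (resource name illustrative):
+        #
+        #     await client.list_content(
+        #         request=dataplex_v1.ListContentRequest(parent="projects/p/locations/l/lakes/lake"),
+        #         parent="projects/p/locations/l/lakes/lake",  # raises ValueError
+        #     )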
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListContentAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
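+        # The routing header assembled below is what lets the backend route
+        # the call by resource; a hedged sketch of what the helper emits
+        # (exact URL-encoding is handled internally):
+        #
+        #     gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        #     # -> (("x-goog-request-params", "name=<url-encoded operation name>"),)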
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
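+        # _wrapped_methods maps each raw transport callable to a
+        # gapic_v1.method wrapper, so mixin calls such as this one pick up
+        # the same default retry/timeout policy and client-info metadata as
+        # the generated RPCs.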
+ rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ContentServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "ContentServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py new file mode 100644 index 000000000000..62103cb0645e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py @@ -0,0 +1,1878 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ContentServiceGrpcTransport +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport +from .transports.rest import ContentServiceRestTransport + + +class ContentServiceClientMeta(type): + """Metaclass for the ContentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] + _transport_registry["grpc"] = ContentServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ContentServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ContentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
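+        # A hedged usage sketch: pinning the transport explicitly rather
+        # than accepting the registry default.
+        #
+        #     transport_cls = ContentServiceClient.get_transport_class("rest")
+        #     client = ContentServiceClient(transport="rest")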
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ContentServiceClient(metaclass=ContentServiceClientMeta):
+    """ContentService manages Notebook and SQL Scripts for Dataplex
+    Universal Catalog.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "dataplex.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ContentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ContentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ContentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ContentServiceTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def content_path(project: str,location: str,lake: str,content: str,) -> str: + """Returns a fully-qualified content string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) + + @staticmethod + def parse_content_path(path: str) -> Dict[str,str]: + """Parses a content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/content/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def lake_path(project: str,location: str,lake: str,) -> str: + """Returns a fully-qualified lake string.""" + return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + + @staticmethod + def parse_lake_path(path: str) -> Dict[str,str]: + """Parses a lake path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ContentServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = ContentServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ContentServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the content service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ContentServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ContentServiceClient._read_environment_variables()
+        self._client_cert_source = ContentServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = ContentServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, ContentServiceTransport)
+        if transport_provided:
+            # transport is a ContentServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(ContentServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ContentServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[ContentServiceTransport], Callable[..., ContentServiceTransport]] = ( + ContentServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ContentServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.ContentServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "credentialsType": None, + } + ) + + def create_content(self, + request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[analyze.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Create a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): + The request object. Create content request. 
+ parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, content] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_content(self, + request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, + *, + content: Optional[analyze.Content] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): + The request object. Update content request. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [content, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("content.name", request.content.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_content(self, + request: Optional[Union[content.DeleteContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): + The request object. Delete content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
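+        # DeleteContent returns google.protobuf.Empty (see the transport's
+        # `delete_content` signature), so the RPC result is intentionally
+        # discarded and this method returns None.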
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_content(self, + request: Optional[Union[content.GetContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): + The request object. Get content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+                **JSON example:**
+
+                .. code-block:: json
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                .. code-block:: yaml
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        if isinstance(request, dict):
+            # - The request isn't a proto-plus wrapped type,
+            #   so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.GetIamPolicyRequest()
+            if resource is not None:
+                request.resource = resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_iam_policy(self,
+            request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> policy_pb2.Policy:
+        r"""Sets the access control policy on the specified contentitem
+        resource. Replaces any existing policy.
+
+        Caller must have Google IAM ``dataplex.content.setIamPolicy``
+        permission on the resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_set_iam_policy():
+                # Create a client
+                client = dataplex_v1.ContentServiceClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                .. code-block:: json
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                .. code-block:: yaml
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        if isinstance(request, dict):
+            # - The request isn't a proto-plus wrapped type,
+            #   so it must be constructed via keyword expansion.
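+            # Each key in the dict must name a field on
+            # ``SetIamPolicyRequest``; an unrecognized key raises
+            # ``ValueError`` at construction time.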
+ request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_content(self, + request: Optional[Union[content.ListContentRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListContentPager: + r"""List content. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): + The request object. List content request. Returns the + BASIC Content view. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: + List content response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListContentPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ContentServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "ContentServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py new file mode 100644 index 000000000000..2892d71ac16d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content + + +class ListContentPager: + """A pager for iterating through ``list_content`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__iter__`` method to iterate through its + ``content`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListContent`` requests and continue to iterate + through the ``content`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., content.ListContentResponse], + request: content.ListContentRequest, + response: content.ListContentResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListContentRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListContentResponse): + The initial response object. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = content.ListContentRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[content.ListContentResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Content]: + for page in self.pages: + yield from page.content + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListContentAsyncPager: + """A pager for iterating through ``list_content`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``content`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListContent`` requests and continue to iterate + through the ``content`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[content.ListContentResponse]], + request: content.ListContentRequest, + response: content.ListContentResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListContentRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListContentResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+        """
+        self._method = method
+        self._request = content.ListContentRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[content.ListContentResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[analyze.Content]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.content:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst
new file mode 100644
index 000000000000..f737919bf8e5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ContentServiceTransport` is the ABC for all transports.
+- public child `ContentServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ContentServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseContentServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ContentServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py
new file mode 100644
index 000000000000..f5b74440a74f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import ContentServiceTransport
+from .grpc import ContentServiceGrpcTransport
+from .grpc_asyncio import ContentServiceGrpcAsyncIOTransport
+from .rest import ContentServiceRestTransport
+from .rest import ContentServiceRestInterceptor
+
+
+# Compile a registry of transports.
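+# The registry keys are the strings a caller may pass as ``transport=`` when
+# constructing a client (for example ``ContentServiceClient(transport="rest")``);
+# ``get_transport_class`` resolves string values against this mapping.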
+_transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] +_transport_registry['grpc'] = ContentServiceGrpcTransport +_transport_registry['grpc_asyncio'] = ContentServiceGrpcAsyncIOTransport +_transport_registry['rest'] = ContentServiceRestTransport + +__all__ = ( + 'ContentServiceTransport', + 'ContentServiceGrpcTransport', + 'ContentServiceGrpcAsyncIOTransport', + 'ContentServiceRestTransport', + 'ContentServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py new file mode 100644 index 000000000000..1482b56942bd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py @@ -0,0 +1,382 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class ContentServiceTransport(abc.ABC): + """Abstract transport class for ContentService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Apply the GDCH audience only to default credentials; credentials
+            # the user supplied via a file are left untouched.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
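+        # Wrapping happens once per transport instance: each entry below bakes
+        # in a default timeout and, where configured, a retry policy, so they
+        # are not rebuilt on every call; per-call ``retry``/``timeout``
+        # arguments passed by the client still override these defaults.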
+ self._wrapped_methods = { + self.create_content: gapic_v1.method.wrap_method( + self.create_content, + default_timeout=60.0, + client_info=client_info, + ), + self.update_content: gapic_v1.method.wrap_method( + self.update_content, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_content: gapic_v1.method.wrap_method( + self.delete_content, + default_timeout=60.0, + client_info=client_info, + ), + self.get_content: gapic_v1.method.wrap_method( + self.get_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_content: gapic_v1.method.wrap_method( + self.list_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_content(self) -> Callable[ + [gcd_content.CreateContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + Union[ + analyze.Content, + Awaitable[analyze.Content] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + Union[ + content.ListContentResponse, + Awaitable[content.ListContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ContentServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py new file mode 100644 index 000000000000..6e90fda5e2cf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -0,0 +1,664 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import content
+from google.cloud.dataplex_v1.types import content as gcd_content
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.ContentService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC response metadata into a dict of string keys and values.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
"status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class ContentServiceGrpcTransport(ContentServiceTransport): + """gRPC backend transport for ContentService. + + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_content(self) -> Callable[
+            [gcd_content.CreateContentRequest],
+            analyze.Content]:
+        r"""Return a callable for the create content method over gRPC.
+
+        Create a content.
+
+        Returns:
+            Callable[[~.CreateContentRequest],
+                    ~.Content]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_content' not in self._stubs:
+            self._stubs['create_content'] = self._logged_channel.unary_unary(
+                '/google.cloud.dataplex.v1.ContentService/CreateContent',
+                request_serializer=gcd_content.CreateContentRequest.serialize,
+                response_deserializer=analyze.Content.deserialize,
+            )
+        return self._stubs['create_content']
+
+    @property
+    def update_content(self) -> Callable[
+            [gcd_content.UpdateContentRequest],
+            analyze.Content]:
+        r"""Return a callable for the update content method over gRPC.
+
+        Update a content. Only supports full resource update.
+
+        Returns:
+            Callable[[~.UpdateContentRequest],
+                    ~.Content]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
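+        # The stub is built on first property access and cached in
+        # self._stubs, so repeated accesses reuse the same callable.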
+ if 'update_content' not in self._stubs: + self._stubs['update_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/UpdateContent', + request_serializer=gcd_content.UpdateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['update_content'] + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete content method over gRPC. + + Delete a content. + + Returns: + Callable[[~.DeleteContentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_content' not in self._stubs: + self._stubs['delete_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/DeleteContent', + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_content'] + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + analyze.Content]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + ~.Content]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_content' not in self._stubs: + self._stubs['get_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetContent', + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['get_content'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified contentitem + resource. Replaces any existing policy. 
+ + Caller must have Google IAM ``dataplex.content.setIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + content.ListContentResponse]: + r"""Return a callable for the list content method over gRPC. + + List content. + + Returns: + Callable[[~.ListContentRequest], + ~.ListContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_content' not in self._stubs: + self._stubs['list_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/ListContent', + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs['list_content'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
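+        # These operation stubs target the shared google.longrunning.Operations
+        # mixin service rather than ContentService itself.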
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ContentServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..10defeeed3a7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import ContentServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + 
request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.ContentService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata into a dict of string keys and values.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.ContentService",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport):
+    """gRPC AsyncIO backend transport for ContentService.
+
+    ContentService manages Notebook and SQL Scripts for Dataplex
+    Universal Catalog.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
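+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.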
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_content(self) -> Callable[
+            [gcd_content.CreateContentRequest],
+            Awaitable[analyze.Content]]:
+        r"""Return a callable for the create content method over gRPC.
+
+        Create a content.
+ + Returns: + Callable[[~.CreateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_content' not in self._stubs: + self._stubs['create_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/CreateContent', + request_serializer=gcd_content.CreateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['create_content'] + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + Awaitable[analyze.Content]]: + r"""Return a callable for the update content method over gRPC. + + Update a content. Only supports full resource update. + + Returns: + Callable[[~.UpdateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_content' not in self._stubs: + self._stubs['update_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/UpdateContent', + request_serializer=gcd_content.UpdateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['update_content'] + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete content method over gRPC. + + Delete a content. + + Returns: + Callable[[~.DeleteContentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_content' not in self._stubs: + self._stubs['delete_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/DeleteContent', + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_content'] + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + Awaitable[analyze.Content]]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_content' not in self._stubs: + self._stubs['get_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetContent', + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs['get_content'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. 
+ + Gets the access control policy for a contentitem resource. A + ``NOT_FOUND`` error is returned if the resource does not exist. + An empty policy is returned if the resource exists but does not + have a policy set on it. + + Caller must have Google IAM ``dataplex.content.getIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified contentitem + resource. Replaces any existing policy. + + Caller must have Google IAM ``dataplex.content.setIamPolicy`` + permission on the resource. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (a + ``NOT_FOUND`` error is not returned). + + A caller is not required to have Google IAM permission to make + this request. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
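+        # The IAM request and response types are plain protobuf messages, so
+        # these stubs use SerializeToString/FromString instead of the
+        # proto-plus serialize/deserialize helpers used for Dataplex types.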
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + Awaitable[content.ListContentResponse]]: + r"""Return a callable for the list content method over gRPC. + + List content. + + Returns: + Callable[[~.ListContentRequest], + Awaitable[~.ListContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_content' not in self._stubs: + self._stubs['list_content'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.ContentService/ListContent', + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs['list_content'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_content: self._wrap_method( + self.create_content, + default_timeout=60.0, + client_info=client_info, + ), + self.update_content: self._wrap_method( + self.update_content, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_content: self._wrap_method( + self.delete_content, + default_timeout=60.0, + client_info=client_info, + ), + self.get_content: self._wrap_method( + self.get_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_content: self._wrap_method( + self.list_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + 
), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
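+        # Stubs are created on self._logged_channel so the debug-logging
+        # interceptor also observes these mixin calls.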
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = (
+    'ContentServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py
new file mode 100644
index 000000000000..3abbe49f6b41
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py
@@ -0,0 +1,2446 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseContentServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class ContentServiceRestInterceptor: + """Interceptor for ContentService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ContentServiceRestTransport. + + .. 
code-block:: python + class MyCustomContentServiceInterceptor(ContentServiceRestInterceptor): + def pre_create_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ContentServiceRestTransport(interceptor=MyCustomContentServiceInterceptor()) + client = ContentServiceClient(transport=transport) + + + """ + def pre_create_content(self, request: gcd_content.CreateContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_content.CreateContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_create_content(self, response: analyze.Content) -> analyze.Content: + """Post-rpc interceptor for create_content + + DEPRECATED. Please use the `post_create_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_create_content` interceptor runs + before the `post_create_content_with_metadata` interceptor. + """ + return response + + def post_create_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_create_content_with_metadata` + interceptor in new development instead of the `post_create_content` interceptor. 
+ When both interceptors are used, this `post_create_content_with_metadata` interceptor runs after the + `post_create_content` interceptor. The (possibly modified) response returned by + `post_create_content` will be passed to + `post_create_content_with_metadata`. + """ + return response, metadata + + def pre_delete_content(self, request: content.DeleteContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.DeleteContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def pre_get_content(self, request: content.GetContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.GetContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_get_content(self, response: analyze.Content) -> analyze.Content: + """Post-rpc interceptor for get_content + + DEPRECATED. Please use the `post_get_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_get_content` interceptor runs + before the `post_get_content_with_metadata` interceptor. + """ + return response + + def post_get_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_get_content_with_metadata` + interceptor in new development instead of the `post_get_content` interceptor. + When both interceptors are used, this `post_get_content_with_metadata` interceptor runs after the + `post_get_content` interceptor. The (possibly modified) response returned by + `post_get_content` will be passed to + `post_get_content_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. 
+ """ + return response + + def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_list_content(self, request: content.ListContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.ListContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_list_content(self, response: content.ListContentResponse) -> content.ListContentResponse: + """Post-rpc interceptor for list_content + + DEPRECATED. Please use the `post_list_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_list_content` interceptor runs + before the `post_list_content_with_metadata` interceptor. + """ + return response + + def post_list_content_with_metadata(self, response: content.ListContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.ListContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_list_content_with_metadata` + interceptor in new development instead of the `post_list_content` interceptor. + When both interceptors are used, this `post_list_content_with_metadata` interceptor runs after the + `post_list_content` interceptor. The (possibly modified) response returned by + `post_list_content` will be passed to + `post_list_content_with_metadata`. + """ + return response, metadata + + def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
+ """ + return response + + def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + def pre_update_content(self, request: gcd_content.UpdateContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_content.UpdateContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_update_content(self, response: analyze.Content) -> analyze.Content: + """Post-rpc interceptor for update_content + + DEPRECATED. Please use the `post_update_content_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. This `post_update_content` interceptor runs + before the `post_update_content_with_metadata` interceptor. + """ + return response + + def post_update_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContentService server but before it is returned to user code. + + We recommend only using this `post_update_content_with_metadata` + interceptor in new development instead of the `post_update_content` interceptor. + When both interceptors are used, this `post_update_content_with_metadata` interceptor runs after the + `post_update_content` interceptor. The (possibly modified) response returned by + `post_update_content` will be passed to + `post_update_content_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ContentService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ContentService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ContentServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ContentServiceRestInterceptor + + +class ContentServiceRestTransport(_BaseContentServiceRestTransport): + """REST backend synchronous transport for ContentService. + + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ContentServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ContentServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateContent(_BaseContentServiceRestTransport._BaseCreateContent, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.CreateContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gcd_content.CreateContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> analyze.Content: + r"""Call the create content method over HTTP. + + Args: + request (~.gcd_content.CreateContentRequest): + The request object. Create content request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.analyze.Content: + Content represents a user-visible + notebook or a sql script + + """ + + http_options = _BaseContentServiceRestTransport._BaseCreateContent._get_http_options() + + request, metadata = self._interceptor.pre_create_content(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseCreateContent._get_transcoded_request(http_options, request) + + body = _BaseContentServiceRestTransport._BaseCreateContent._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseCreateContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.CreateContent", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "CreateContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._CreateContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
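+            # from_http_response maps the HTTP status to the matching typed
+            # exception (for example 404 -> core_exceptions.NotFound,
+            # 403 -> core_exceptions.Forbidden), so callers can catch specific
+            # error classes instead of inspecting raw status codes.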
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analyze.Content() + pb_resp = analyze.Content.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = analyze.Content.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.create_content", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "CreateContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteContent(_BaseContentServiceRestTransport._BaseDeleteContent, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.DeleteContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: content.DeleteContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete content method over HTTP. + + Args: + request (~.content.DeleteContentRequest): + The request object. Delete content request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseDeleteContent._get_http_options() + + request, metadata = self._interceptor.pre_delete_content(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseDeleteContent._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseDeleteContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.DeleteContent", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "DeleteContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._DeleteContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetContent(_BaseContentServiceRestTransport._BaseGetContent, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.GetContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: content.GetContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> analyze.Content: + r"""Call the get content method over HTTP. + + Args: + request (~.content.GetContentRequest): + The request object. Get content request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.analyze.Content: + Content represents a user-visible + notebook or a sql script + + """ + + http_options = _BaseContentServiceRestTransport._BaseGetContent._get_http_options() + + request, metadata = self._interceptor.pre_get_content(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseGetContent._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseGetContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetContent", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._GetContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analyze.Content() + pb_resp = analyze.Content.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = analyze.Content.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.get_content", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy(_BaseContentServiceRestTransport._BaseGetIamPolicy, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the get iam policy 
method over HTTP.
+
+            Args:
+                request (~.iam_policy_pb2.GetIamPolicyRequest):
+                    The request object. Request message for ``GetIamPolicy`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.policy_pb2.Policy:
+                    An Identity and Access Management (IAM) policy, which
+                    specifies access controls for Google Cloud resources.
+
+                    A ``Policy`` is a collection of ``bindings``. A
+                    ``binding`` binds one or more ``members``, or
+                    principals, to a single ``role``. Principals can be user
+                    accounts, service accounts, Google groups, and domains
+                    (such as G Suite). A ``role`` is a named list of
+                    permissions; each ``role`` can be an IAM predefined role
+                    or a user-created custom role.
+
+                    For some types of Google Cloud resources, a ``binding``
+                    can also specify a ``condition``, which is a logical
+                    expression that allows access to a resource only if the
+                    expression evaluates to ``true``. A condition can add
+                    constraints based on attributes of the request, the
+                    resource, or both. To learn which resources support
+                    conditions in their IAM policies, see the `IAM
+                    documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+                    **JSON example:**
+
+                    ::
+
+                        {
+                          "bindings": [
+                            {
+                              "role": "roles/resourcemanager.organizationAdmin",
+                              "members": [
+                                "user:mike@example.com",
+                                "group:admins@example.com",
+                                "domain:google.com",
+                                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                              ]
+                            },
+                            {
+                              "role": "roles/resourcemanager.organizationViewer",
+                              "members": [
+                                "user:eve@example.com"
+                              ],
+                              "condition": {
+                                "title": "expirable access",
+                                "description": "Does not grant access after Sep 2020",
+                                "expression": "request.time <
+                                timestamp('2020-10-01T00:00:00.000Z')",
+                              }
+                            }
+                          ],
+                          "etag": "BwWWja0YfJA=",
+                          "version": 3
+                        }
+
+                    **YAML example:**
+
+                    ::
+
+                        bindings:
+                        - members:
+                          - user:mike@example.com
+                          - group:admins@example.com
+                          - domain:google.com
+                          - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                          role: roles/resourcemanager.organizationAdmin
+                        - members:
+                          - user:eve@example.com
+                          role: roles/resourcemanager.organizationViewer
+                          condition:
+                            title: expirable access
+                            description: Does not grant access after Sep 2020
+                            expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                        etag: BwWWja0YfJA=
+                        version: 3
+
+                    For a description of IAM and its features, see the `IAM
+                    documentation <https://cloud.google.com/iam/docs/>`__.
+ + """ + + http_options = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetIamPolicy", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListContent(_BaseContentServiceRestTransport._BaseListContent, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.ListContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: content.ListContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> content.ListContentResponse: + r"""Call the list content method over HTTP. + + Args: + request (~.content.ListContentRequest): + The request object. 
List content request. Returns the + BASIC Content view. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.content.ListContentResponse: + List content response. + """ + + http_options = _BaseContentServiceRestTransport._BaseListContent._get_http_options() + + request, metadata = self._interceptor.pre_list_content(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseListContent._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseListContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListContent", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._ListContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
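+            # On success, the JSON body is parsed into a ListContentResponse
+            # below; ContentServiceClient.list_content then wraps it in a pager
+            # that follows next_page_token across pages automatically.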
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = content.ListContentResponse() + pb_resp = content.ListContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = content.ListContentResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.list_content", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy(_BaseContentServiceRestTransport._BaseSetIamPolicy, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support
+                    conditions in their IAM policies, see the `IAM
+                    documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+                    **JSON example:**
+
+                    ::
+
+                        {
+                          "bindings": [
+                            {
+                              "role": "roles/resourcemanager.organizationAdmin",
+                              "members": [
+                                "user:mike@example.com",
+                                "group:admins@example.com",
+                                "domain:google.com",
+                                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                              ]
+                            },
+                            {
+                              "role": "roles/resourcemanager.organizationViewer",
+                              "members": [
+                                "user:eve@example.com"
+                              ],
+                              "condition": {
+                                "title": "expirable access",
+                                "description": "Does not grant access after Sep 2020",
+                                "expression": "request.time <
+                                timestamp('2020-10-01T00:00:00.000Z')",
+                              }
+                            }
+                          ],
+                          "etag": "BwWWja0YfJA=",
+                          "version": 3
+                        }
+
+                    **YAML example:**
+
+                    ::
+
+                        bindings:
+                        - members:
+                          - user:mike@example.com
+                          - group:admins@example.com
+                          - domain:google.com
+                          - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                          role: roles/resourcemanager.organizationAdmin
+                        - members:
+                          - user:eve@example.com
+                          role: roles/resourcemanager.organizationViewer
+                          condition:
+                            title: expirable access
+                            description: Does not grant access after Sep 2020
+                            expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                        etag: BwWWja0YfJA=
+                        version: 3
+
+                    For a description of IAM and its features, see the `IAM
+                    documentation <https://cloud.google.com/iam/docs/>`__.
+
+            """
+
+            http_options = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_http_options()
+
+            request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
+            transcoded_request = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request)
+
+            body = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.SetIamPolicy",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.ContentService",
+                        "rpcName": "SetIamPolicy",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = ContentServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
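+            # A set_iam_policy request that carries a stale ``etag`` is rejected
+            # by the service; the resulting HTTP error is translated into a
+            # typed exception by the status check below.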
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions(_BaseContentServiceRestTransport._BaseTestIamPermissions, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.TestIamPermissions", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateContent(_BaseContentServiceRestTransport._BaseUpdateContent, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.UpdateContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gcd_content.UpdateContentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> analyze.Content: + r"""Call the update content method over HTTP. + + Args: + request (~.gcd_content.UpdateContentRequest): + The request object. Update content request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.analyze.Content: + Content represents a user-visible + notebook or a SQL script + + """ + + http_options = _BaseContentServiceRestTransport._BaseUpdateContent._get_http_options() + + request, metadata = self._interceptor.pre_update_content(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseUpdateContent._get_transcoded_request(http_options, request) + + body = _BaseContentServiceRestTransport._BaseUpdateContent._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseUpdateContent._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.UpdateContent", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "UpdateContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._UpdateContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
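+ # For example (illustrative): an HTTP 404 is surfaced as
+ # core_exceptions.NotFound and an HTTP 403 as core_exceptions.Forbidden,
+ # so a caller can catch the specific subclass:
+ #
+ #     try:
+ #         transport.update_content(request)
+ #     except core_exceptions.NotFound:
+ #         ...  # the content resource does not exist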
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analyze.Content() + pb_resp = analyze.Content.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_content_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = analyze.Content.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceClient.update_content", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "UpdateContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_content(self) -> Callable[ + [gcd_content.CreateContentRequest], + analyze.Content]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_content(self) -> Callable[ + [content.DeleteContentRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_content(self) -> Callable[ + [content.GetContentRequest], + analyze.Content]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_content(self) -> Callable[ + [content.ListContentRequest], + content.ListContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_content(self) -> Callable[ + [gcd_content.UpdateContentRequest], + analyze.Content]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseContentServiceRestTransport._BaseGetLocation, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseContentServiceRestTransport._BaseListLocations, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseContentServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
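+ # Note: the `retry` argument is accepted for interface parity but is
+ # not applied in this method body; the request is sent once through
+ # the underlying requests session.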
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseContentServiceRestTransport._BaseCancelOperation, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseContentServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseContentServiceRestTransport._BaseDeleteOperation, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseContentServiceRestTransport._BaseGetOperation, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseContentServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseContentServiceRestTransport._BaseListOperations, ContentServiceRestStub): + def __hash__(self): + return hash("ContentServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseContentServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseContentServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseContentServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ContentServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
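+ # Note: unlike the service RPCs above, this mixin parses the response
+ # without ignore_unknown_fields, so a response field missing from the
+ # local operations_pb2 definition would raise a ParseError.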
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.ContentService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ContentServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py new file mode 100644 index 000000000000..eab30664870c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseContentServiceRestTransport(ContentServiceTransport): + """Base REST backend transport for ContentService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/contentitems', + 'body': 'content', + }, + { + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/content', + 'body': 'content', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcd_content.CreateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseCreateContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/content/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = content.DeleteContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseDeleteContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/content/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = content.GetContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseGetContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:getIamPolicy', + }, + { + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/content/**}:getIamPolicy', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/contentitems', + }, + { + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/content', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = content.ListContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseListContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/content/**}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=projects/*/locations/*/lakes/*/content/**}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{content.name=projects/*/locations/*/lakes/*/contentitems/**}', + 'body': 'content', + }, + { + 'method': 'patch', + 'uri': '/v1/{content.name=projects/*/locations/*/lakes/*/content/**}', + 'body': 'content', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcd_content.UpdateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseContentServiceRestTransport._BaseUpdateContent._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = 
json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseContentServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py new file mode 100644 index 000000000000..0fa571256762 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataScanServiceClient +from .async_client import DataScanServiceAsyncClient + +__all__ = ( + 'DataScanServiceClient', + 'DataScanServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py new file mode 100644 index 000000000000..9e4c31893087 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -0,0 +1,1755 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_scan_service import pagers +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import datascans +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport +from .client import DataScanServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class DataScanServiceAsyncClient: + """DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. + """ + + _client: DataScanServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
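+ # (Illustrative) in generated clients this template typically has the
+ # form "dataplex.{UNIVERSE_DOMAIN}", so the endpoint tracks a custom
+ # universe domain; the actual literal is defined on the sync client.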
+ DEFAULT_ENDPOINT = DataScanServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE + + connection_path = staticmethod(DataScanServiceClient.connection_path) + parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) + data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) + parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) + data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) + parse_data_scan_job_path = staticmethod(DataScanServiceClient.parse_data_scan_job_path) + dataset_path = staticmethod(DataScanServiceClient.dataset_path) + parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) + entity_path = staticmethod(DataScanServiceClient.entity_path) + parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) + common_billing_account_path = staticmethod(DataScanServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataScanServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataScanServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataScanServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataScanServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataScanServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataScanServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataScanServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataScanServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataScanServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceAsyncClient: The constructed client. + """ + return DataScanServiceClient.from_service_account_info.__func__(DataScanServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataScanServiceAsyncClient: The constructed client. + """ + return DataScanServiceClient.from_service_account_file.__func__(DataScanServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DataScanServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DataScanServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataScanServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DataScanServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data scan service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataScanServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DataScanServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.cloud.dataplex_v1.DataScanServiceAsyncClient`.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
+                    "credentialsType": None,
+                }
+            )
+
+    async def create_data_scan(self,
+            request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            data_scan: Optional[datascans.DataScan] = None,
+            data_scan_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Creates a DataScan resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.CreateDataScanRequest( + parent="parent_value", + data_scan=data_scan, + data_scan_id="data_scan_id_value", + ) + + # Make the request + operation = client.create_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]]): + The request object. Create dataScan request. + parent (:class:`str`): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): + Required. DataScan resource. + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan_id (:class:`str`): + Required. DataScan identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + + This corresponds to the ``data_scan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. 
For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_scan, data_scan_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.CreateDataScanRequest): + request = datascans.CreateDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_scan is not None: + request.data_scan = data_scan + if data_scan_id is not None: + request.data_scan_id = data_scan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_data_scan(self, + request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, + *, + data_scan: Optional[datascans.DataScan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.UpdateDataScanRequest( + data_scan=data_scan, + ) + + # Make the request + operation = client.update_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]]): + The request object. Update dataScan request. + data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): + Required. DataScan resource to be updated. + + Only fields specified in ``update_mask`` are updated. + + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
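+        # For example, update_data_scan(request=req, update_mask=mask) with both
+        # arguments set would fail this check and raise the ValueError below.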
+ flattened_params = [data_scan, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.UpdateDataScanRequest): + request = datascans.UpdateDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_scan is not None: + request.data_scan = data_scan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_scan.name", request.data_scan.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_scan(self, + request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]]): + The request object. Delete dataScan request. + name (:class:`str`): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.DeleteDataScanRequest): + request = datascans.DeleteDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_data_scan(self, + request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.DataScan: + r"""Gets a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]]): + The request object. Get dataScan request. + name (:class:`str`): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataScan: + Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanRequest): + request = datascans.GetDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
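+        # The gapic_v1.method.DEFAULT sentinels for `retry` and `timeout` are
+        # resolved at call time to the per-method defaults baked into the
+        # transport's wrapped-method table.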
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_scans(self, + request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataScansAsyncPager: + r"""Lists DataScans. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_scans(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scans(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]]): + The request object. List dataScans request. + parent (:class:`str`): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager: + List dataScans response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
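+        # For example, list_data_scans(parent="projects/my-project/locations/us-central1")
+        # is shorthand for building the full ListDataScansRequest by hand
+        # ("my-project" and "us-central1" are placeholder values).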
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, datascans.ListDataScansRequest):
+            request = datascans.ListDataScansRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scans]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListDataScansAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def run_data_scan(self,
+            request: Optional[Union[datascans.RunDataScanRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> datascans.RunDataScanResponse:
+        r"""Runs an on-demand execution of a DataScan.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_run_data_scan():
+                # Create a client
+                client = dataplex_v1.DataScanServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.RunDataScanRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.run_data_scan(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]]):
+                The request object. Run DataScan Request.
+            name (:class:`str`):
+                Required. The resource name of the DataScan:
+                ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
+                where ``project`` refers to a *project_id* or
+                *project_number* and ``location_id`` refers to a Google
+                Cloud region.
+
+                Only **OnDemand** data scans are allowed.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.RunDataScanResponse: + Run DataScan Response. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.RunDataScanRequest): + request = datascans.RunDataScanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_scan_job(self, + request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.DataScanJob: + r"""Gets a DataScanJob resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]]): + The request object. Get DataScanJob request. + name (:class:`str`): + Required. The resource name of the DataScanJob: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataScanJob: + A DataScanJob represents an instance + of DataScan execution. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanJobRequest): + request = datascans.GetDataScanJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_scan_jobs(self, + request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataScanJobsAsyncPager: + r"""Lists DataScanJobs under the given DataScan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_list_data_scan_jobs():
+                # Create a client
+                client = dataplex_v1.DataScanServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.ListDataScanJobsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_data_scan_jobs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]]):
+                The request object. List DataScanJobs request.
+            parent (:class:`str`):
+                Required. The resource name of the parent DataScan:
+                ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
+                where ``project`` refers to a *project_id* or
+                *project_number* and ``location_id`` refers to a Google
+                Cloud region.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager:
+                List DataScanJobs response.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, datascans.ListDataScanJobsRequest):
+            request = datascans.ListDataScanJobsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scan_jobs]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
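+        # The pager can then be consumed with `async for job in page_result:`;
+        # subsequent pages are fetched lazily, reusing the same retry, timeout,
+        # and metadata values captured below.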
+ response = pagers.ListDataScanJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def generate_data_quality_rules(self, + request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.GenerateDataQualityRulesResponse: + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]]): + The request object. Request details for generating data + quality rule recommendations. + name (:class:`str`): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling job + (a data scan job where the job type is data profiling) + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: + Response details for data quality + rule recommendations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
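+        # Note: a plain dict is also accepted here; the proto-plus constructor
+        # below coerces it into a GenerateDataQualityRulesRequest field by field.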
+ if not isinstance(request, datascans.GenerateDataQualityRulesRequest): + request = datascans.GenerateDataQualityRulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_data_quality_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataScanServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DataScanServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py new file mode 100644 index 000000000000..a37c5519e9b5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -0,0 +1,2154 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_scan_service import pagers +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import datascans +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataScanServiceGrpcTransport +from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport +from .transports.rest import DataScanServiceRestTransport + + +class DataScanServiceClientMeta(type): + """Metaclass for the DataScanService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] + _transport_registry["grpc"] = DataScanServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataScanServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataScanServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataScanServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
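+        # A hedged illustration of the lookup (the registered keys above are
+        # "grpc", "grpc_asyncio", and "rest"):
+        #
+        #     DataScanServiceClient.get_transport_class("rest")
+        #     # -> DataScanServiceRestTransport
+        #
+        # With no label, the first entry in the registry ("grpc") is returned.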
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataScanServiceClient(metaclass=DataScanServiceClientMeta):
+    """DataScanService manages DataScan resources which can be
+    configured to run various types of data scanning workloads and
+    generate enriched metadata (e.g. Data Profile, Data Quality) for
+    the data source.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts an API endpoint to its mTLS counterpart.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "dataplex.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataScanServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataScanServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DataScanServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataScanServiceTransport: The transport used by the client
+            instance.
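+
+        A brief usage sketch (a hedged example; it assumes default
+        construction succeeds in the current environment):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.DataScanServiceClient()
+            # With no explicit transport argument this is the gRPC transport.
+            print(type(client.transport).__name__)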
+        """
+        return self._transport
+
+    @staticmethod
+    def connection_path(project: str,location: str,connection: str,) -> str:
+        """Returns a fully-qualified connection string."""
+        return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, )
+
+    @staticmethod
+    def parse_connection_path(path: str) -> Dict[str,str]:
+        """Parses a connection path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connections/(?P<connection>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def data_scan_path(project: str,location: str,dataScan: str,) -> str:
+        """Returns a fully-qualified data_scan string."""
+        return "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, )
+
+    @staticmethod
+    def parse_data_scan_path(path: str) -> Dict[str,str]:
+        """Parses a data_scan path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataScans/(?P<dataScan>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def data_scan_job_path(project: str,location: str,dataScan: str,job: str,) -> str:
+        """Returns a fully-qualified data_scan_job string."""
+        return "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, )
+
+    @staticmethod
+    def parse_data_scan_job_path(path: str) -> Dict[str,str]:
+        """Parses a data_scan_job path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataScans/(?P<dataScan>.+?)/jobs/(?P<job>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def dataset_path(project: str,dataset: str,) -> str:
+        """Returns a fully-qualified dataset string."""
+        return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, )
+
+    @staticmethod
+    def parse_dataset_path(path: str) -> Dict[str,str]:
+        """Parses a dataset path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str:
+        """Returns a fully-qualified entity string."""
+        return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, )
+
+    @staticmethod
+    def parse_entity_path(path: str) -> Dict[str,str]:
+        """Parses an entity path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)/entities/(?P<entity>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DataScanServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data scan service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataScanServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation fails for any reason.
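+
+        A minimal construction sketch (a hedged example; it assumes
+        Application Default Credentials are available, and the endpoint
+        override shown is purely illustrative):
+
+        .. code-block:: python
+
+            from google.cloud import dataplex_v1
+            from google.api_core import client_options as client_options_lib
+
+            options = client_options_lib.ClientOptions(
+                api_endpoint="dataplex.googleapis.com",
+            )
+            client = dataplex_v1.DataScanServiceClient(client_options=options)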
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataScanServiceClient._read_environment_variables() + self._client_cert_source = DataScanServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DataScanServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataScanServiceTransport) + if transport_provided: + # transport is a DataScanServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DataScanServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataScanServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport]] = ( + DataScanServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataScanServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.DataScanServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "credentialsType": None, + } + ) + + def create_data_scan(self, + request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_scan: Optional[datascans.DataScan] = None, + data_scan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.CreateDataScanRequest( + parent="parent_value", + data_scan=data_scan, + data_scan_id="data_scan_id_value", + ) + + # Make the request + operation = client.create_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]): + The request object. Create dataScan request. + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource. + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_scan_id (str): + Required. DataScan identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + + This corresponds to the ``data_scan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. 
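+        # For illustration (hypothetical resource names), the flattened form
+        #
+        #     client.create_data_scan(
+        #         parent="projects/my-project/locations/us-central1",
+        #         data_scan=scan,
+        #         data_scan_id="my-scan",
+        #     )
+        #
+        # builds the same CreateDataScanRequest a caller could pass via
+        # `request=`; supplying both styles at once raises ValueError below.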
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_scan, data_scan_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.CreateDataScanRequest): + request = datascans.CreateDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_scan is not None: + request.data_scan = data_scan + if data_scan_id is not None: + request.data_scan_id = data_scan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_data_scan(self, + request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, + *, + data_scan: Optional[datascans.DataScan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + data_scan = dataplex_v1.DataScan() + data_scan.data_quality_spec.rules.dimension = "dimension_value" + data_scan.data.entity = "entity_value" + + request = dataplex_v1.UpdateDataScanRequest( + data_scan=data_scan, + ) + + # Make the request + operation = client.update_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]): + The request object. Update dataScan request. + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource to be updated. 
+ + Only fields specified in ``update_mask`` are updated. + + This corresponds to the ``data_scan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_scan, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.UpdateDataScanRequest): + request = datascans.UpdateDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_scan is not None: + request.data_scan = data_scan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_scan.name", request.data_scan.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
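+        # (Hedged usage note: the returned future follows
+        # google.api_core.operation.Operation, so callers typically use
+        # operation.result(timeout=...), operation.done(), or
+        # operation.cancel() on it.)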
+ response = operation.from_gapic( + response, + self._transport.operations_client, + datascans.DataScan, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_scan(self, + request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]): + The request object. Delete dataScan request. + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
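+        # For example (hypothetical name), a plain dict is also accepted here
+        # and coerced into the proto message:
+        #     request = {"name": "projects/my-project/locations/us-central1/dataScans/my-scan"}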
+ if not isinstance(request, datascans.DeleteDataScanRequest): + request = datascans.DeleteDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_data_scan(self, + request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.DataScan: + r"""Gets a DataScan resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]): + The request object. Get dataScan request. + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataScan: + Represents a user-visible job which provides the insights for the related + data source. + + For example: + + - Data quality: generates queries based on the rules + and runs against the data to get data quality check + results. 
For more information, see [Auto data + quality + overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and + generates insights about the structure, content and + relationships (such as null percent, cardinality, + min/max/mean, etc). For more information, see + [About data + profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage buckets + to extract and then catalog metadata. For more + information, see [Discover and catalog Cloud + Storage + data](https://cloud.google.com/bigquery/docs/automatic-discovery). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanRequest): + request = datascans.GetDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_scans(self, + request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataScansPager: + r"""Lists DataScans. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_scans(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]): + The request object. List dataScans request. + parent (str): + Required. 
The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* + and ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager: + List dataScans response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScansRequest): + request = datascans.ListDataScansRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_scans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataScansPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_data_scan(self, + request: Optional[Union[datascans.RunDataScanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.RunDataScanResponse: + r"""Runs an on-demand execution of a DataScan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_run_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.run_data_scan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]): + The request object. Run DataScan Request + name (str): + Required. The resource name of the DataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + Only **OnDemand** data scans are allowed. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.RunDataScanResponse: + Run DataScan Response. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.RunDataScanRequest): + request = datascans.RunDataScanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_data_scan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_scan_job(self, + request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.DataScanJob: + r"""Gets a DataScanJob resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]): + The request object. Get DataScanJob request. + name (str): + Required. The resource name of the DataScanJob: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataScanJob: + A DataScanJob represents an instance + of DataScan execution. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GetDataScanJobRequest): + request = datascans.GetDataScanJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_scan_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
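+        # (The returned DataScanJob reports execution state and timing; the
+        # populated result field depends on the scan type, e.g. a data
+        # quality job carries data_quality_result.)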
+ return response + + def list_data_scan_jobs(self, + request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataScanJobsPager: + r"""Lists DataScanJobs under the given DataScan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_scan_jobs(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScanJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scan_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]): + The request object. List DataScanJobs request. + parent (str): + Required. The resource name of the parent environment: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager: + List DataScanJobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.ListDataScanJobsRequest): + request = datascans.ListDataScanJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_data_scan_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataScanJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_data_quality_rules(self, + request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datascans.GenerateDataQualityRulesResponse: + r"""Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]): + The request object. Request details for generating data + quality rule recommendations. + name (str): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling job + (a data scan job where the job type is data profiling) + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: + Response details for data quality + rule recommendations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datascans.GenerateDataQualityRulesRequest): + request = datascans.GenerateDataQualityRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_data_quality_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataScanServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "DataScanServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py new file mode 100644 index 000000000000..9e99ef310bf3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import datascans + + +class ListDataScansPager: + """A pager for iterating through ``list_data_scans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_scans`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataScans`` requests and continue to iterate + through the ``data_scans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datascans.ListDataScansResponse], + request: datascans.ListDataScansRequest, + response: datascans.ListDataScansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScansRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScansResponse): + The initial response object. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = datascans.ListDataScansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datascans.ListDataScansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datascans.DataScan]: + for page in self.pages: + yield from page.data_scans + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScansAsyncPager: + """A pager for iterating through ``list_data_scans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_scans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataScans`` requests and continue to iterate + through the ``data_scans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datascans.ListDataScansResponse]], + request: datascans.ListDataScansRequest, + response: datascans.ListDataScansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScansRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = datascans.ListDataScansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datascans.ListDataScansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datascans.DataScan]: + async def async_generator(): + async for page in self.pages: + for response in page.data_scans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScanJobsPager: + """A pager for iterating through ``list_data_scan_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_scan_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataScanJobs`` requests and continue to iterate + through the ``data_scan_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datascans.ListDataScanJobsResponse], + request: datascans.ListDataScanJobsRequest, + response: datascans.ListDataScanJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = datascans.ListDataScanJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datascans.ListDataScanJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datascans.DataScanJob]: + for page in self.pages: + yield from page.data_scan_jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataScanJobsAsyncPager: + """A pager for iterating through ``list_data_scan_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_scan_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataScanJobs`` requests and continue to iterate + through the ``data_scan_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datascans.ListDataScanJobsResponse]], + request: datascans.ListDataScanJobsRequest, + response: datascans.ListDataScanJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = datascans.ListDataScanJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datascans.ListDataScanJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datascans.DataScanJob]: + async def async_generator(): + async for page in self.pages: + for response in page.data_scan_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst new file mode 100644 index 000000000000..e27965be0a36 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataScanServiceTransport` is the ABC for all transports. +- public child `DataScanServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataScanServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataScanServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DataScanServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py new file mode 100644 index 000000000000..223ef82cfff0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataScanServiceTransport +from .grpc import DataScanServiceGrpcTransport +from .grpc_asyncio import DataScanServiceGrpcAsyncIOTransport +from .rest import DataScanServiceRestTransport +from .rest import DataScanServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] +_transport_registry['grpc'] = DataScanServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataScanServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DataScanServiceRestTransport + +__all__ = ( + 'DataScanServiceTransport', + 'DataScanServiceGrpcTransport', + 'DataScanServiceGrpcAsyncIOTransport', + 'DataScanServiceRestTransport', + 'DataScanServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py new file mode 100644 index 000000000000..cafedfaf10bd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import datascans +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataScanServiceTransport(abc.ABC): + """Abstract transport class for DataScanService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply the audience if a credentials file was passed by the user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods.
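+ # Each RPC is wrapped exactly once, when the transport is constructed;
+ # client methods then look the wrapped callable up via
+ # self._transport._wrapped_methods[...] rather than re-wrapping per call.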
+ self._wrapped_methods = { + self.create_data_scan: gapic_v1.method.wrap_method( + self.create_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.update_data_scan: gapic_v1.method.wrap_method( + self.update_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_scan: gapic_v1.method.wrap_method( + self.delete_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan: gapic_v1.method.wrap_method( + self.get_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scans: gapic_v1.method.wrap_method( + self.list_data_scans, + default_timeout=None, + client_info=client_info, + ), + self.run_data_scan: gapic_v1.method.wrap_method( + self.run_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan_job: gapic_v1.method.wrap_method( + self.get_data_scan_job, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scan_jobs: gapic_v1.method.wrap_method( + self.list_data_scan_jobs, + default_timeout=None, + client_info=client_info, + ), + self.generate_data_quality_rules: gapic_v1.method.wrap_method( + self.generate_data_quality_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + Union[ + datascans.DataScan, + Awaitable[datascans.DataScan] + ]]: + raise NotImplementedError() + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + Union[ + datascans.ListDataScansResponse, + Awaitable[datascans.ListDataScansResponse] + ]]: + raise NotImplementedError() + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + Union[ + datascans.RunDataScanResponse, + Awaitable[datascans.RunDataScanResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + Union[ + datascans.DataScanJob, + Awaitable[datascans.DataScanJob] + ]]: + raise NotImplementedError() + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + Union[ + datascans.ListDataScanJobsResponse, + Awaitable[datascans.ListDataScanJobsResponse] + ]]: + raise NotImplementedError() + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + Union[ + datascans.GenerateDataQualityRulesResponse, + Awaitable[datascans.GenerateDataQualityRulesResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataScanServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py 
b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py new file mode 100644 index 000000000000..3424a6c32dc9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py @@ -0,0 +1,691 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import datascans +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata to a list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None +
result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DataScanServiceGrpcTransport(DataScanServiceTransport): + """gRPC backend transport for DataScanService. + + DataScanService manages DataScan resources which can be + configured to run various types of data scanning workloads and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(
+                self._logged_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_data_scan(self) -> Callable[
+            [datascans.CreateDataScanRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create data scan method over gRPC.
+
+        Creates a DataScan resource.
+
+        Returns:
+            Callable[[~.CreateDataScanRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'create_data_scan' not in self._stubs: + self._stubs['create_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', + request_serializer=datascans.CreateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_scan'] + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data scan method over gRPC. + + Updates a DataScan resource. + + Returns: + Callable[[~.UpdateDataScanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_scan' not in self._stubs: + self._stubs['update_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', + request_serializer=datascans.UpdateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_scan'] + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data scan method over gRPC. + + Deletes a DataScan resource. + + Returns: + Callable[[~.DeleteDataScanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_scan' not in self._stubs: + self._stubs['delete_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', + request_serializer=datascans.DeleteDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_scan'] + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + datascans.DataScan]: + r"""Return a callable for the get data scan method over gRPC. + + Gets a DataScan resource. + + Returns: + Callable[[~.GetDataScanRequest], + ~.DataScan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan' not in self._stubs: + self._stubs['get_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScan', + request_serializer=datascans.GetDataScanRequest.serialize, + response_deserializer=datascans.DataScan.deserialize, + ) + return self._stubs['get_data_scan'] + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + datascans.ListDataScansResponse]: + r"""Return a callable for the list data scans method over gRPC. + + Lists DataScans. + + Returns: + Callable[[~.ListDataScansRequest], + ~.ListDataScansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scans' not in self._stubs: + self._stubs['list_data_scans'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScans', + request_serializer=datascans.ListDataScansRequest.serialize, + response_deserializer=datascans.ListDataScansResponse.deserialize, + ) + return self._stubs['list_data_scans'] + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + datascans.RunDataScanResponse]: + r"""Return a callable for the run data scan method over gRPC. + + Runs an on-demand execution of a DataScan + + Returns: + Callable[[~.RunDataScanRequest], + ~.RunDataScanResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_data_scan' not in self._stubs: + self._stubs['run_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/RunDataScan', + request_serializer=datascans.RunDataScanRequest.serialize, + response_deserializer=datascans.RunDataScanResponse.deserialize, + ) + return self._stubs['run_data_scan'] + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + datascans.DataScanJob]: + r"""Return a callable for the get data scan job method over gRPC. + + Gets a DataScanJob resource. + + Returns: + Callable[[~.GetDataScanJobRequest], + ~.DataScanJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan_job' not in self._stubs: + self._stubs['get_data_scan_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', + request_serializer=datascans.GetDataScanJobRequest.serialize, + response_deserializer=datascans.DataScanJob.deserialize, + ) + return self._stubs['get_data_scan_job'] + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + datascans.ListDataScanJobsResponse]: + r"""Return a callable for the list data scan jobs method over gRPC. + + Lists DataScanJobs under the given DataScan. + + Returns: + Callable[[~.ListDataScanJobsRequest], + ~.ListDataScanJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scan_jobs' not in self._stubs: + self._stubs['list_data_scan_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', + request_serializer=datascans.ListDataScanJobsRequest.serialize, + response_deserializer=datascans.ListDataScanJobsResponse.deserialize, + ) + return self._stubs['list_data_scan_jobs'] + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + datascans.GenerateDataQualityRulesResponse]: + r"""Return a callable for the generate data quality rules method over gRPC. 
+ + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + Returns: + Callable[[~.GenerateDataQualityRulesRequest], + ~.GenerateDataQualityRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_data_quality_rules' not in self._stubs: + self._stubs['generate_data_quality_rules'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', + request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, + response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, + ) + return self._stubs['generate_data_quality_rules'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'DataScanServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..6a450d6f8511
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py
@@ -0,0 +1,782 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
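Reviewer note: the synchronous transport above is normally constructed implicitly by the client, but it can also be wired in explicitly. A minimal usage sketch, assuming application default credentials are available; the project, location, and scan IDs in the resource name are placeholders following the documented `projects/{project}/locations/{location}/dataScans/{data_scan}` pattern:

```python
from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.data_scan_service.transports.grpc import (
    DataScanServiceGrpcTransport,
)

# With no channel argument, the transport calls create_channel() with the
# default host and AUTH_SCOPES, then wraps the channel with the logging
# interceptor defined above.
transport = DataScanServiceGrpcTransport(host="dataplex.googleapis.com")
client = dataplex_v1.DataScanServiceClient(transport=transport)

scan = client.get_data_scan(
    name="projects/my-project/locations/us-central1/dataScans/my-scan"
)
print(scan.name)
```

Passing a callable instead of a channel (see the `channel` argument) is useful when you need custom channel options but still want the transport to supply credentials.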
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import datascans
+from google.cloud.location import locations_pb2 # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataScanServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC trailing metadata to a dict of str -> str
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
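Reviewer note: both the sync and async interceptors above are gated on two conditions: `google.api_core.client_logging` must be importable, and the module logger must have DEBUG enabled, so nothing is logged by default. A minimal sketch of turning the output on with the standard library (plain `logging` usage, nothing dataplex-specific):

```python
import logging

# Emit all records at INFO and above, then opt the generated dataplex
# transports into DEBUG. The interceptors check
# _LOGGER.isEnabledFor(logging.DEBUG) on loggers under this hierarchy.
logging.basicConfig(level=logging.INFO)
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)
```

Recent google-api-core releases can also scope this via the `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable, if the installed version supports it.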
+class DataScanServiceGrpcAsyncIOTransport(DataScanServiceTransport):
+    """gRPC AsyncIO backend transport for DataScanService.
+
+    DataScanService manages DataScan resources which can be
+    configured to run various types of data scanning workload and
+    generate enriched metadata (e.g. Data Profile, Data Quality) for
+    the data source.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data scan method over gRPC. + + Creates a DataScan resource. + + Returns: + Callable[[~.CreateDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_data_scan' not in self._stubs: + self._stubs['create_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', + request_serializer=datascans.CreateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_scan'] + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data scan method over gRPC. + + Updates a DataScan resource. + + Returns: + Callable[[~.UpdateDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_scan' not in self._stubs: + self._stubs['update_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', + request_serializer=datascans.UpdateDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_scan'] + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data scan method over gRPC. + + Deletes a DataScan resource. + + Returns: + Callable[[~.DeleteDataScanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_scan' not in self._stubs: + self._stubs['delete_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', + request_serializer=datascans.DeleteDataScanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_scan'] + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + Awaitable[datascans.DataScan]]: + r"""Return a callable for the get data scan method over gRPC. + + Gets a DataScan resource. + + Returns: + Callable[[~.GetDataScanRequest], + Awaitable[~.DataScan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan' not in self._stubs: + self._stubs['get_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScan', + request_serializer=datascans.GetDataScanRequest.serialize, + response_deserializer=datascans.DataScan.deserialize, + ) + return self._stubs['get_data_scan'] + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + Awaitable[datascans.ListDataScansResponse]]: + r"""Return a callable for the list data scans method over gRPC. + + Lists DataScans. + + Returns: + Callable[[~.ListDataScansRequest], + Awaitable[~.ListDataScansResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_scans' not in self._stubs: + self._stubs['list_data_scans'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScans', + request_serializer=datascans.ListDataScansRequest.serialize, + response_deserializer=datascans.ListDataScansResponse.deserialize, + ) + return self._stubs['list_data_scans'] + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + Awaitable[datascans.RunDataScanResponse]]: + r"""Return a callable for the run data scan method over gRPC. + + Runs an on-demand execution of a DataScan + + Returns: + Callable[[~.RunDataScanRequest], + Awaitable[~.RunDataScanResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_data_scan' not in self._stubs: + self._stubs['run_data_scan'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/RunDataScan', + request_serializer=datascans.RunDataScanRequest.serialize, + response_deserializer=datascans.RunDataScanResponse.deserialize, + ) + return self._stubs['run_data_scan'] + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + Awaitable[datascans.DataScanJob]]: + r"""Return a callable for the get data scan job method over gRPC. + + Gets a DataScanJob resource. + + Returns: + Callable[[~.GetDataScanJobRequest], + Awaitable[~.DataScanJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_scan_job' not in self._stubs: + self._stubs['get_data_scan_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', + request_serializer=datascans.GetDataScanJobRequest.serialize, + response_deserializer=datascans.DataScanJob.deserialize, + ) + return self._stubs['get_data_scan_job'] + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + Awaitable[datascans.ListDataScanJobsResponse]]: + r"""Return a callable for the list data scan jobs method over gRPC. + + Lists DataScanJobs under the given DataScan. + + Returns: + Callable[[~.ListDataScanJobsRequest], + Awaitable[~.ListDataScanJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_data_scan_jobs' not in self._stubs: + self._stubs['list_data_scan_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', + request_serializer=datascans.ListDataScanJobsRequest.serialize, + response_deserializer=datascans.ListDataScanJobsResponse.deserialize, + ) + return self._stubs['list_data_scan_jobs'] + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + Awaitable[datascans.GenerateDataQualityRulesResponse]]: + r"""Return a callable for the generate data quality rules method over gRPC. + + Generates recommended data quality rules based on the + results of a data profiling scan. + + Use the recommendations to build rules for a data + quality scan. + + Returns: + Callable[[~.GenerateDataQualityRulesRequest], + Awaitable[~.GenerateDataQualityRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_data_quality_rules' not in self._stubs: + self._stubs['generate_data_quality_rules'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', + request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, + response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, + ) + return self._stubs['generate_data_quality_rules'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_scan: self._wrap_method( + self.create_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.update_data_scan: self._wrap_method( + self.update_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_scan: self._wrap_method( + self.delete_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan: self._wrap_method( + self.get_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scans: self._wrap_method( + self.list_data_scans, + default_timeout=None, + client_info=client_info, + ), + self.run_data_scan: self._wrap_method( + self.run_data_scan, + default_timeout=None, + client_info=client_info, + ), + self.get_data_scan_job: self._wrap_method( + self.get_data_scan_job, + default_timeout=None, + client_info=client_info, + ), + self.list_data_scan_jobs: self._wrap_method( + self.list_data_scan_jobs, + default_timeout=None, + client_info=client_info, + ), + self.generate_data_quality_rules: self._wrap_method( + self.generate_data_quality_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( 
+ self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. 
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = (
+    'DataScanServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py
new file mode 100644
index 000000000000..467803d26e62
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py
@@ -0,0 +1,2620 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
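Reviewer note: the AsyncIO transport above is what `DataScanServiceAsyncClient` selects by default, so most callers never touch it directly. A short usage sketch under that assumption; the resource name is a placeholder following the documented `projects/*/locations/*/dataScans/*` pattern:

```python
import asyncio

from google.cloud import dataplex_v1


async def main() -> None:
    # The async client builds a DataScanServiceGrpcAsyncIOTransport, and the
    # _LoggingClientAIOInterceptor defined above is appended to its channel's
    # unary-unary interceptors.
    client = dataplex_v1.DataScanServiceAsyncClient()
    scan = await client.get_data_scan(
        name="projects/my-project/locations/us-central1/dataScans/my-scan"
    )
    print(scan.name)


asyncio.run(main())
```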
+import logging
+import json  # type: ignore
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+import google.protobuf
+
+from google.protobuf import json_format
+from google.api_core import operations_v1
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.cloud.location import locations_pb2 # type: ignore
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.dataplex_v1.types import datascans
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseDataScanServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = logging.getLogger(__name__)
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
+
+class DataScanServiceRestInterceptor:
+    """Interceptor for DataScanService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the DataScanServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomDataScanServiceInterceptor(DataScanServiceRestInterceptor):
+            def pre_create_data_scan(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_data_scan(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_data_scan(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_data_scan(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_generate_data_quality_rules(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_generate_data_quality_rules(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_get_data_scan(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_data_scan(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_get_data_scan_job(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_data_scan_job(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_data_scan_jobs(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_data_scan_jobs(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_data_scans(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_data_scans(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_run_data_scan(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_run_data_scan(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_data_scan(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_data_scan(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = DataScanServiceRestTransport(interceptor=MyCustomDataScanServiceInterceptor())
+        client = DataScanServiceClient(transport=transport)
+
+
+    """
+    def pre_create_data_scan(self, request: datascans.CreateDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.CreateDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_data_scan
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DataScanService server.
+        """
+        return request, metadata
+
+    def post_create_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+        """Post-rpc interceptor for create_data_scan
+
+        DEPRECATED. Please use the `post_create_data_scan_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the DataScanService server but before
+        it is returned to user code. This `post_create_data_scan` interceptor runs
+        before the `post_create_data_scan_with_metadata` interceptor.
+ """ + return response + + def post_create_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_scan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_create_data_scan_with_metadata` + interceptor in new development instead of the `post_create_data_scan` interceptor. + When both interceptors are used, this `post_create_data_scan_with_metadata` interceptor runs after the + `post_create_data_scan` interceptor. The (possibly modified) response returned by + `post_create_data_scan` will be passed to + `post_create_data_scan_with_metadata`. + """ + return response, metadata + + def pre_delete_data_scan(self, request: datascans.DeleteDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DeleteDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_data_scan + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_delete_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_scan + + DEPRECATED. Please use the `post_delete_data_scan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_delete_data_scan` interceptor runs + before the `post_delete_data_scan_with_metadata` interceptor. + """ + return response + + def post_delete_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_scan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_delete_data_scan_with_metadata` + interceptor in new development instead of the `post_delete_data_scan` interceptor. + When both interceptors are used, this `post_delete_data_scan_with_metadata` interceptor runs after the + `post_delete_data_scan` interceptor. The (possibly modified) response returned by + `post_delete_data_scan` will be passed to + `post_delete_data_scan_with_metadata`. + """ + return response, metadata + + def pre_generate_data_quality_rules(self, request: datascans.GenerateDataQualityRulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GenerateDataQualityRulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for generate_data_quality_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_generate_data_quality_rules(self, response: datascans.GenerateDataQualityRulesResponse) -> datascans.GenerateDataQualityRulesResponse: + """Post-rpc interceptor for generate_data_quality_rules + + DEPRECATED. Please use the `post_generate_data_quality_rules_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_generate_data_quality_rules` interceptor runs + before the `post_generate_data_quality_rules_with_metadata` interceptor. + """ + return response + + def post_generate_data_quality_rules_with_metadata(self, response: datascans.GenerateDataQualityRulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GenerateDataQualityRulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for generate_data_quality_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_generate_data_quality_rules_with_metadata` + interceptor in new development instead of the `post_generate_data_quality_rules` interceptor. + When both interceptors are used, this `post_generate_data_quality_rules_with_metadata` interceptor runs after the + `post_generate_data_quality_rules` interceptor. The (possibly modified) response returned by + `post_generate_data_quality_rules` will be passed to + `post_generate_data_quality_rules_with_metadata`. + """ + return response, metadata + + def pre_get_data_scan(self, request: datascans.GetDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GetDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_scan + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_get_data_scan(self, response: datascans.DataScan) -> datascans.DataScan: + """Post-rpc interceptor for get_data_scan + + DEPRECATED. Please use the `post_get_data_scan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_get_data_scan` interceptor runs + before the `post_get_data_scan_with_metadata` interceptor. + """ + return response + + def post_get_data_scan_with_metadata(self, response: datascans.DataScan, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DataScan, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_scan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_get_data_scan_with_metadata` + interceptor in new development instead of the `post_get_data_scan` interceptor. + When both interceptors are used, this `post_get_data_scan_with_metadata` interceptor runs after the + `post_get_data_scan` interceptor. The (possibly modified) response returned by + `post_get_data_scan` will be passed to + `post_get_data_scan_with_metadata`. + """ + return response, metadata + + def pre_get_data_scan_job(self, request: datascans.GetDataScanJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GetDataScanJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_scan_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. 
+ """ + return request, metadata + + def post_get_data_scan_job(self, response: datascans.DataScanJob) -> datascans.DataScanJob: + """Post-rpc interceptor for get_data_scan_job + + DEPRECATED. Please use the `post_get_data_scan_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_get_data_scan_job` interceptor runs + before the `post_get_data_scan_job_with_metadata` interceptor. + """ + return response + + def post_get_data_scan_job_with_metadata(self, response: datascans.DataScanJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DataScanJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_scan_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_get_data_scan_job_with_metadata` + interceptor in new development instead of the `post_get_data_scan_job` interceptor. + When both interceptors are used, this `post_get_data_scan_job_with_metadata` interceptor runs after the + `post_get_data_scan_job` interceptor. The (possibly modified) response returned by + `post_get_data_scan_job` will be passed to + `post_get_data_scan_job_with_metadata`. + """ + return response, metadata + + def pre_list_data_scan_jobs(self, request: datascans.ListDataScanJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScanJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_data_scan_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_list_data_scan_jobs(self, response: datascans.ListDataScanJobsResponse) -> datascans.ListDataScanJobsResponse: + """Post-rpc interceptor for list_data_scan_jobs + + DEPRECATED. Please use the `post_list_data_scan_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_list_data_scan_jobs` interceptor runs + before the `post_list_data_scan_jobs_with_metadata` interceptor. + """ + return response + + def post_list_data_scan_jobs_with_metadata(self, response: datascans.ListDataScanJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScanJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_data_scan_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_list_data_scan_jobs_with_metadata` + interceptor in new development instead of the `post_list_data_scan_jobs` interceptor. + When both interceptors are used, this `post_list_data_scan_jobs_with_metadata` interceptor runs after the + `post_list_data_scan_jobs` interceptor. The (possibly modified) response returned by + `post_list_data_scan_jobs` will be passed to + `post_list_data_scan_jobs_with_metadata`. 
+ """ + return response, metadata + + def pre_list_data_scans(self, request: datascans.ListDataScansRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScansRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_data_scans + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_list_data_scans(self, response: datascans.ListDataScansResponse) -> datascans.ListDataScansResponse: + """Post-rpc interceptor for list_data_scans + + DEPRECATED. Please use the `post_list_data_scans_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_list_data_scans` interceptor runs + before the `post_list_data_scans_with_metadata` interceptor. + """ + return response + + def post_list_data_scans_with_metadata(self, response: datascans.ListDataScansResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScansResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_data_scans + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_list_data_scans_with_metadata` + interceptor in new development instead of the `post_list_data_scans` interceptor. + When both interceptors are used, this `post_list_data_scans_with_metadata` interceptor runs after the + `post_list_data_scans` interceptor. The (possibly modified) response returned by + `post_list_data_scans` will be passed to + `post_list_data_scans_with_metadata`. + """ + return response, metadata + + def pre_run_data_scan(self, request: datascans.RunDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.RunDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for run_data_scan + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_run_data_scan(self, response: datascans.RunDataScanResponse) -> datascans.RunDataScanResponse: + """Post-rpc interceptor for run_data_scan + + DEPRECATED. Please use the `post_run_data_scan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_run_data_scan` interceptor runs + before the `post_run_data_scan_with_metadata` interceptor. + """ + return response + + def post_run_data_scan_with_metadata(self, response: datascans.RunDataScanResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.RunDataScanResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_data_scan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_run_data_scan_with_metadata` + interceptor in new development instead of the `post_run_data_scan` interceptor. + When both interceptors are used, this `post_run_data_scan_with_metadata` interceptor runs after the + `post_run_data_scan` interceptor. 
The (possibly modified) response returned by + `post_run_data_scan` will be passed to + `post_run_data_scan_with_metadata`. + """ + return response, metadata + + def pre_update_data_scan(self, request: datascans.UpdateDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.UpdateDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_data_scan + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_update_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_scan + + DEPRECATED. Please use the `post_update_data_scan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. This `post_update_data_scan` interceptor runs + before the `post_update_data_scan_with_metadata` interceptor. + """ + return response + + def post_update_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_scan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataScanService server but before it is returned to user code. + + We recommend only using this `post_update_data_scan_with_metadata` + interceptor in new development instead of the `post_update_data_scan` interceptor. + When both interceptors are used, this `post_update_data_scan_with_metadata` interceptor runs after the + `post_update_data_scan` interceptor. The (possibly modified) response returned by + `post_update_data_scan` will be passed to + `post_update_data_scan_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. 
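+
+        For example (an illustrative sketch):
+
+        .. code-block:: python
+
+            class MyInterceptor(DataScanServiceRestInterceptor):
+                def post_list_locations(self, response):
+                    # Inspect the listed locations before they reach user code.
+                    for location in response.locations:
+                        print(location.name)
+                    return response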
+ """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataScanService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataScanService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataScanServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataScanServiceRestInterceptor + + +class DataScanServiceRestTransport(_BaseDataScanServiceRestTransport): + """REST backend synchronous transport for DataScanService. + + DataScanService manages DataScan resources which can be + configured to run various types of data scanning workload and + generate enriched metadata (e.g. Data Profile, Data Quality) for + the data source. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+    """
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DataScanServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme (str): The protocol scheme for the API endpoint. Normally
+                "https", but "http" can be specified for testing or local
+                servers.
+            interceptor (Optional[DataScanServiceRestInterceptor]): An optional
+                interceptor whose pre- and post-RPC hooks run around each call;
+                if omitted, a no-op interceptor is used.
+            api_audience (Optional[str]): The intended audience of the service
+                JWT, if the credentials support it.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DataScanServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateDataScan(_BaseDataScanServiceRestTransport._BaseCreateDataScan, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.CreateDataScan") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: datascans.CreateDataScanRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create data scan method over HTTP. + + Args: + request (~.datascans.CreateDataScanRequest): + The request object. Create dataScan request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
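+
+                A hedged sketch of a typical call through the public client
+                (resource names and IDs are placeholders; a real request must
+                also populate the scan's spec and data source):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+
+                    client = dataplex_v1.DataScanServiceClient()
+                    operation = client.create_data_scan(
+                        parent="projects/my-project/locations/us-central1",
+                        data_scan=dataplex_v1.DataScan(),
+                        data_scan_id="my-scan",
+                    )
+                    # Block until the long-running operation completes.
+                    data_scan = operation.result()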
+ + """ + + http_options = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_http_options() + + request, metadata = self._interceptor.pre_create_data_scan(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_transcoded_request(http_options, request) + + body = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.CreateDataScan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "CreateDataScan", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._CreateDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_scan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_scan_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "CreateDataScan", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDataScan(_BaseDataScanServiceRestTransport._BaseDeleteDataScan, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.DeleteDataScan") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: datascans.DeleteDataScanRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
operations_pb2.Operation: + r"""Call the delete data scan method over HTTP. + + Args: + request (~.datascans.DeleteDataScanRequest): + The request object. Delete dataScan request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_scan(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.DeleteDataScan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "DeleteDataScan", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._DeleteDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
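+            # For example, an HTTP 404 surfaces as core_exceptions.NotFound
+            # and an HTTP 403 as core_exceptions.PermissionDenied, both
+            # subclasses of GoogleAPICallError.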
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_data_scan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_scan_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "DeleteDataScan", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GenerateDataQualityRules(_BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.GenerateDataQualityRules") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: datascans.GenerateDataQualityRulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> datascans.GenerateDataQualityRulesResponse: + r"""Call the generate data quality + rules method over HTTP. + + Args: + request (~.datascans.GenerateDataQualityRulesRequest): + The request object. Request details for generating data + quality rule recommendations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datascans.GenerateDataQualityRulesResponse: + Response details for data quality + rule recommendations. 
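+
+                A hedged sketch of a typical call through the public client
+                (the resource name is a placeholder):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+
+                    client = dataplex_v1.DataScanServiceClient()
+                    response = client.generate_data_quality_rules(
+                        name="projects/my-project/locations/us-central1/dataScans/my-scan",
+                    )
+                    for rule in response.rule:
+                        print(rule)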
+ + """ + + http_options = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_http_options() + + request, metadata = self._interceptor.pre_generate_data_quality_rules(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_transcoded_request(http_options, request) + + body = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GenerateDataQualityRules", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GenerateDataQualityRules", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._GenerateDataQualityRules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.GenerateDataQualityRulesResponse() + pb_resp = datascans.GenerateDataQualityRulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_generate_data_quality_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_data_quality_rules_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.GenerateDataQualityRulesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GenerateDataQualityRules", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataScan(_BaseDataScanServiceRestTransport._BaseGetDataScan, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.GetDataScan") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return 
response
+
+        def __call__(self,
+                request: datascans.GetDataScanRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> datascans.DataScan:
+            r"""Call the get data scan method over HTTP.
+
+            Args:
+                request (~.datascans.GetDataScanRequest):
+                    The request object. Get dataScan request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.datascans.DataScan:
+                    Represents a user-visible job which provides the
+                    insights for the related data source.
+
+                    For example:
+
+                    - Data quality: generates queries based on the rules and
+                      runs against the data to get data quality check
+                      results. For more information, see the Auto data
+                      quality overview documentation.
+                    - Data profile: analyzes the data in tables and
+                      generates insights about the structure, content and
+                      relationships (such as null percent, cardinality,
+                      min/max/mean, etc). For more information, see the
+                      About data profiling documentation.
+                    - Data discovery: scans data in Cloud Storage buckets to
+                      extract and then catalog metadata. For more
+                      information, see the Discover and catalog Cloud
+                      Storage data documentation.
+
+            """
+
+            http_options = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_data_scan(request, metadata)
+            transcoded_request = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetDataScan",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataScanService",
+                        "rpcName": "GetDataScan",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DataScanServiceRestTransport._GetDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.DataScan() + pb_resp = datascans.DataScan.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_scan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_scan_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.DataScan.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetDataScan", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataScanJob(_BaseDataScanServiceRestTransport._BaseGetDataScanJob, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.GetDataScanJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: datascans.GetDataScanJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> datascans.DataScanJob: + r"""Call the get data scan job method over HTTP. + + Args: + request (~.datascans.GetDataScanJobRequest): + The request object. Get DataScanJob request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datascans.DataScanJob: + A DataScanJob represents an instance + of DataScan execution. 
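+
+                A hedged sketch of a typical call through the public client
+                (the resource name is a placeholder):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+
+                    client = dataplex_v1.DataScanServiceClient()
+                    job = client.get_data_scan_job(
+                        name="projects/my-project/locations/us-central1/dataScans/my-scan/jobs/my-job",
+                    )
+                    print(job.state)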
+ + """ + + http_options = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_http_options() + + request, metadata = self._interceptor.pre_get_data_scan_job(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetDataScanJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetDataScanJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._GetDataScanJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.DataScanJob() + pb_resp = datascans.DataScanJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_scan_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_scan_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.DataScanJob.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetDataScanJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDataScanJobs(_BaseDataScanServiceRestTransport._BaseListDataScanJobs, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.ListDataScanJobs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: datascans.ListDataScanJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> datascans.ListDataScanJobsResponse: + r"""Call the list 
data scan jobs method over HTTP. + + Args: + request (~.datascans.ListDataScanJobsRequest): + The request object. List DataScanJobs request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datascans.ListDataScanJobsResponse: + List DataScanJobs response. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_http_options() + + request, metadata = self._interceptor.pre_list_data_scan_jobs(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListDataScanJobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListDataScanJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._ListDataScanJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.ListDataScanJobsResponse() + pb_resp = datascans.ListDataScanJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_data_scan_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_scan_jobs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.ListDataScanJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListDataScanJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDataScans(_BaseDataScanServiceRestTransport._BaseListDataScans, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.ListDataScans") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: datascans.ListDataScansRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> datascans.ListDataScansResponse: + r"""Call the list data scans method over HTTP. + + Args: + request (~.datascans.ListDataScansRequest): + The request object. List dataScans request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datascans.ListDataScansResponse: + List dataScans response. 
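+
+                A hedged sketch of a typical call through the public client,
+                which wraps this response in a pager (the parent is a
+                placeholder):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+
+                    client = dataplex_v1.DataScanServiceClient()
+                    for scan in client.list_data_scans(
+                        parent="projects/my-project/locations/us-central1",
+                    ):
+                        print(scan.name)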
+ """ + + http_options = _BaseDataScanServiceRestTransport._BaseListDataScans._get_http_options() + + request, metadata = self._interceptor.pre_list_data_scans(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseListDataScans._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseListDataScans._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListDataScans", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListDataScans", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._ListDataScans._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.ListDataScansResponse() + pb_resp = datascans.ListDataScansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_data_scans(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_scans_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.ListDataScansResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListDataScans", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _RunDataScan(_BaseDataScanServiceRestTransport._BaseRunDataScan, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.RunDataScan") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: datascans.RunDataScanRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> datascans.RunDataScanResponse: + r"""Call the run 
data scan method over HTTP. + + Args: + request (~.datascans.RunDataScanRequest): + The request object. Run DataScan Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datascans.RunDataScanResponse: + Run DataScan Response. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_http_options() + + request, metadata = self._interceptor.pre_run_data_scan(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_transcoded_request(http_options, request) + + body = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.RunDataScan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "RunDataScan", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._RunDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datascans.RunDataScanResponse() + pb_resp = datascans.RunDataScanResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_run_data_scan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_data_scan_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = datascans.RunDataScanResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "RunDataScan", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDataScan(_BaseDataScanServiceRestTransport._BaseUpdateDataScan, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.UpdateDataScan") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: datascans.UpdateDataScanRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update data scan method over HTTP. + + Args: + request (~.datascans.UpdateDataScanRequest): + The request object. Update dataScan request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
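+
+                A hedged sketch of a typical call through the public client
+                (the resource name and field value are placeholders):
+
+                .. code-block:: python
+
+                    from google.cloud import dataplex_v1
+                    from google.protobuf import field_mask_pb2
+
+                    client = dataplex_v1.DataScanServiceClient()
+                    operation = client.update_data_scan(
+                        data_scan=dataplex_v1.DataScan(
+                            name="projects/my-project/locations/us-central1/dataScans/my-scan",
+                            description="updated description",
+                        ),
+                        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+                    )
+                    data_scan = operation.result()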
+ + """ + + http_options = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_http_options() + + request, metadata = self._interceptor.pre_update_data_scan(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_transcoded_request(http_options, request) + + body = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.UpdateDataScan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "UpdateDataScan", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._UpdateDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_scan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_scan_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "UpdateDataScan", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_data_scan(self) -> Callable[ + [datascans.CreateDataScanRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataScan(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_scan(self) -> Callable[ + [datascans.DeleteDataScanRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDataScan(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_data_quality_rules(self) -> Callable[ + [datascans.GenerateDataQualityRulesRequest], + datascans.GenerateDataQualityRulesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateDataQualityRules(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_scan(self) -> Callable[ + [datascans.GetDataScanRequest], + datascans.DataScan]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataScan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_scan_job(self) -> Callable[ + [datascans.GetDataScanJobRequest], + datascans.DataScanJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataScanJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_scan_jobs(self) -> Callable[ + [datascans.ListDataScanJobsRequest], + datascans.ListDataScanJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataScanJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_scans(self) -> Callable[ + [datascans.ListDataScansRequest], + datascans.ListDataScansResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataScans(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_data_scan(self) -> Callable[ + [datascans.RunDataScanRequest], + datascans.RunDataScanResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunDataScan(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_scan(self) -> Callable[ + [datascans.UpdateDataScanRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDataScan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseDataScanServiceRestTransport._BaseGetLocation, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
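+            # `from_http_response` maps the HTTP status to the matching
+            # GoogleAPICallError subclass (for example 404 -> NotFound,
+            # 403 -> PermissionDenied), so callers can catch specific error
+            # types. A hedged caller-side sketch (names illustrative only):
+            #
+            #     try:
+            #         location = client.get_location(request=request)
+            #     except core_exceptions.NotFound:
+            #         ...  # handle a missing location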
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseDataScanServiceRestTransport._BaseListLocations, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
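+
+            A minimal usage sketch, assuming a configured
+            ``DataScanServiceClient`` (the mixin exposes this RPC as
+            ``client.list_locations``; the project name below is illustrative):
+
+            .. code-block:: python
+
+                from google.cloud import dataplex_v1
+                from google.cloud.location import locations_pb2
+
+                client = dataplex_v1.DataScanServiceClient()
+                request = locations_pb2.ListLocationsRequest(
+                    name="projects/my-project",
+                )
+                response = client.list_locations(request=request)
+                for location in response.locations:
+                    print(location.location_id)
+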
+ """ + + http_options = _BaseDataScanServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseDataScanServiceRestTransport._BaseCancelOperation, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseDataScanServiceRestTransport._BaseDeleteOperation, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseDataScanServiceRestTransport._BaseGetOperation, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseDataScanServiceRestTransport._BaseListOperations, DataScanServiceRestStub): + def __hash__(self): + return hash("DataScanServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseDataScanServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDataScanServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataScanServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataScanServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
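+            # Pagination is the caller's responsibility at this level: each
+            # call returns a single page. A hedged sketch (client and request
+            # names illustrative):
+            #
+            #     while True:
+            #         page = client.list_operations(request=request)
+            #         for op in page.operations:
+            #             ...
+            #         if not page.next_page_token:
+            #             break
+            #         request.page_token = page.next_page_token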
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataScanService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DataScanServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py new file mode 100644 index 000000000000..b099e32c902a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py @@ -0,0 +1,645 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import datascans +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDataScanServiceRestTransport(DataScanServiceTransport): + """Base REST backend transport for DataScanService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
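+
+        The constructor only normalizes ``host`` into a full URL and defers
+        all I/O to the sync/async subclasses. A hedged construction sketch
+        using the concrete sync subclass (default credentials are resolved
+        from the environment):
+
+        .. code-block:: python
+
+            transport = DataScanServiceRestTransport(
+                host="dataplex.googleapis.com",
+            )
+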
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateDataScan:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "dataScanId" : "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*}/dataScans',
+                'body': 'data_scan',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = datascans.CreateDataScanRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDataScanServiceRestTransport._BaseCreateDataScan._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteDataScan:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'delete',
+                'uri': '/v1/{name=projects/*/locations/*/dataScans/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = datascans.DeleteDataScanRequest.pb(request)
+            transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGenerateDataQualityRules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}:generateDataQualityRules', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}:generateDataQualityRules', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.GenerateDataQualityRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataScan: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.GetDataScanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseGetDataScan._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataScanJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.GetDataScanJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataScanJobs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/dataScans/*}/jobs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.ListDataScanJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataScans: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/dataScans', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.ListDataScansRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseListDataScans._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunDataScan: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}:run', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.RunDataScanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseRunDataScan._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataScan: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{data_scan.name=projects/*/locations/*/dataScans/*}', + 'body': 'data_scan', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datascans.UpdateDataScanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return 
http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseDataScanServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py new file mode 100644 index 000000000000..68e09c57de83 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataTaxonomyServiceClient +from .async_client import DataTaxonomyServiceAsyncClient + +__all__ = ( + 'DataTaxonomyServiceClient', + 'DataTaxonomyServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py new file mode 100644 index 000000000000..b30b9bf94e01 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -0,0 +1,2543 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.dataplex_v1.types import security +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport +from .client import DataTaxonomyServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class DataTaxonomyServiceAsyncClient: + """DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. + """ + + _client: DataTaxonomyServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
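+    # With the default universe the endpoint template expands to the regular
+    # endpoint; a hedged illustration (the exact template string lives on the
+    # sync client):
+    #
+    #     _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com")
+    #     # -> "dataplex.googleapis.com"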
+ DEFAULT_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + + data_attribute_path = staticmethod(DataTaxonomyServiceClient.data_attribute_path) + parse_data_attribute_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_path) + data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.data_attribute_binding_path) + parse_data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_binding_path) + data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.data_taxonomy_path) + parse_data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.parse_data_taxonomy_path) + common_billing_account_path = staticmethod(DataTaxonomyServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataTaxonomyServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataTaxonomyServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataTaxonomyServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataTaxonomyServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataTaxonomyServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataTaxonomyServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataTaxonomyServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataTaxonomyServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataTaxonomyServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceAsyncClient: The constructed client. + """ + return DataTaxonomyServiceClient.from_service_account_info.__func__(DataTaxonomyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTaxonomyServiceAsyncClient: The constructed client. + """ + return DataTaxonomyServiceClient.from_service_account_file.__func__(DataTaxonomyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if the client cert source exists, use the default mTLS endpoint,
+        else use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DataTaxonomyServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DataTaxonomyServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataTaxonomyServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DataTaxonomyServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the data taxonomy service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DataTaxonomyServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DataTaxonomyServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient`.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                    "credentialsType": None,
+                }
+            )
+
+    async def create_data_taxonomy(self,
+            request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None,
+            data_taxonomy_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Create a DataTaxonomy resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataTaxonomyRequest( + parent="parent_value", + data_taxonomy_id="data_taxonomy_id_value", + ) + + # Make the request + operation = client.create_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]]): + The request object. Create DataTaxonomy request. + parent (:class:`str`): + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): + Required. DataTaxonomy resource. + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_taxonomy_id (:class:`str`): + Required. DataTaxonomy identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Project. + + This corresponds to the ``data_taxonomy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.create_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_taxonomy, data_taxonomy_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): + request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
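+        # Illustrative flattened call, mirroring the docstring sample above;
+        # the resource names are hypothetical placeholders:
+        #
+        #     operation = client.create_data_taxonomy(
+        #         parent="projects/my-project/locations/us-central1",
+        #         data_taxonomy=dataplex_v1.DataTaxonomy(),
+        #         data_taxonomy_id="my-taxonomy",
+        #     )
+        #     response = (await operation).result()
+        #
+        # Each flattened argument supplied this way is copied onto the request here.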
+ if parent is not None: + request.parent = parent + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if data_taxonomy_id is not None: + request.data_taxonomy_id = data_taxonomy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, + *, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataTaxonomyRequest( + ) + + # Make the request + operation = client.update_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]]): + The request object. Update DataTaxonomy request. + data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.update_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_taxonomy, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_taxonomy.name", request.data_taxonomy.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_taxonomy(self, + request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]]): + The request object. Delete DataTaxonomy request. + name (:class:`str`): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): + request = data_taxonomy.DeleteDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
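+        # `retry` and `timeout` default to the sentinel gapic_v1.method.DEFAULT,
+        # in which case the policies baked into the wrapped method apply;
+        # explicit values passed by the caller override them for this call only.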
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_taxonomies(self, + request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTaxonomiesAsyncPager: + r"""Lists DataTaxonomy resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_taxonomies(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_taxonomies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]]): + The request object. List DataTaxonomies request. + parent (:class:`str`): + Required. The resource name of the DataTaxonomy + location, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager: + List DataTaxonomies response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.list_data_taxonomies is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
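+        # `request` may be a ListDataTaxonomiesRequest, a dict with the same
+        # fields, or None; anything that is not already the proto type is
+        # coerced below (None yields an empty request that the flattened
+        # `parent` argument, if given, then populates).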
+ if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): + request = data_taxonomy.ListDataTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataTaxonomiesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_taxonomy(self, + request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_taxonomy.DataTaxonomy: + r"""Retrieves a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]]): + The request object. Get DataTaxonomy request. + name (:class:`str`): + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataTaxonomy: + DataTaxonomy represents a set of + hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have + attributes to manage PII data. It is + defined at project level. 
+ + """ + warnings.warn("DataTaxonomyServiceAsyncClient.get_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): + request = data_taxonomy.GetDataTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + data_attribute_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.CreateDataAttributeBindingRequest( + parent="parent_value", + data_attribute_binding_id="data_attribute_binding_id_value", + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.create_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]]): + The request object. Create DataAttributeBinding request. 
+ parent (:class:`str`): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): + Required. DataAttributeBinding + resource. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding_id (:class:`str`): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + + This corresponds to the ``data_attribute_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.create_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_attribute_binding, data_attribute_binding_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): + request = data_taxonomy.CreateDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if data_attribute_binding_id is not None: + request.data_attribute_binding_id = data_attribute_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
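+        # Confirms the configured credentials belong to the same universe
+        # domain as the client's endpoint (see the `universe_domain` property);
+        # on a mismatch a ValueError is raised before any request is sent.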
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, + *, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.UpdateDataAttributeBindingRequest( + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.update_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]]): + The request object. Update DataAttributeBinding request. + data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. 
Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.update_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_attribute_binding, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): + request = data_taxonomy.UpdateDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute_binding.name", request.data_attribute_binding.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeBindingRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + operation = client.delete_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]]): + The request object. Delete DataAttributeBinding request. + name (:class:`str`): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): + request = data_taxonomy.DeleteDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. 
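+        # The `name` field is echoed into the `x-goog-request-params` request
+        # header so the backend can route the call to the correct regional
+        # resource.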
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_attribute_bindings(self, + request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataAttributeBindingsAsyncPager: + r"""Lists DataAttributeBinding resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_data_attribute_bindings(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributeBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attribute_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]]): + The request object. List DataAttributeBindings request. + parent (:class:`str`): + Required. The resource name of the Location: + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager: + List DataAttributeBindings response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.list_data_attribute_bindings is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
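+        # For example, this (hypothetical) call would raise ValueError below,
+        # because `request` and the flattened `parent` are mutually exclusive:
+        #
+        #     client.list_data_attribute_bindings(
+        #         request={"parent": "projects/p/locations/l"},
+        #         parent="projects/p/locations/l",
+        #     )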
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): + request = data_taxonomy.ListDataAttributeBindingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attribute_bindings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataAttributeBindingsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_taxonomy.DataAttributeBinding: + r"""Retrieves a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeBindingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_attribute_binding(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]]): + The request object. Get DataAttributeBinding request. + name (:class:`str`): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataAttributeBinding: + DataAttributeBinding represents + binding of attributes to resources. Eg: + Bind 'CustomerInfo' entity with 'PII' + attribute. + + """ + warnings.warn("DataTaxonomyServiceAsyncClient.get_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): + request = data_taxonomy.GetDataAttributeBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_data_attribute(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + data_attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_create_data_attribute():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.CreateDataAttributeRequest(
+                    parent="parent_value",
+                    data_attribute_id="data_attribute_id_value",
+                )
+
+                # Make the request
+                operation = client.create_data_attribute(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]]):
+                The request object. Create DataAttribute request.
+            parent (:class:`str`):
+                Required. The resource name of the parent data taxonomy
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`):
+                Required. DataAttribute resource.
+                This corresponds to the ``data_attribute`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_attribute_id (:class:`str`):
+                Required. DataAttribute identifier.
+
+                - Must contain only lowercase letters, numbers and
+                  hyphens.
+                - Must start with a letter.
+                - Must be between 1-63 characters.
+                - Must end with a number or a letter.
+                - Must be unique within the DataTaxonomy.
+
+                This corresponds to the ``data_attribute_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute`
+                Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                DataAttribute resources can be defined in a hierarchy. A single
+                dataAttribute resource can contain specs of multiple types::
+
+                    PII
+                      - ResourceAccessSpec:
+                          - readers: foo@bar.com
+                      - DataAccessSpec:
+                          - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceAsyncClient.create_data_attribute is deprecated",
+            DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
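+        # Illustrative flattened call (the parent taxonomy and attribute id
+        # are hypothetical placeholders):
+        #
+        #     operation = client.create_data_attribute(
+        #         parent="projects/my-project/locations/us-central1/dataTaxonomies/my-taxonomy",
+        #         data_attribute=dataplex_v1.DataAttribute(),
+        #         data_attribute_id="pii",
+        #     )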
+ flattened_params = [parent, data_attribute, data_attribute_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): + request = data_taxonomy.CreateDataAttributeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_attribute is not None: + request.data_attribute = data_attribute + if data_attribute_id is not None: + request.data_attribute_id = data_attribute_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + data_taxonomy.DataAttribute, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_data_attribute(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None, + *, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataAttributeRequest( + ) + + # Make the request + operation = client.update_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]]): + The request object. Update DataAttribute request. + data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`): + Required. Only fields specified in ``update_mask`` are + updated. 
+
+                This corresponds to the ``data_attribute`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. Mask of fields to update.
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                DataAttribute resources can be defined in a
+                hierarchy. A single dataAttribute resource can
+                contain specs of multiple types:
+
+                ::
+
+                    PII
+                      - ResourceAccessSpec:
+                          - readers: foo@bar.com
+                      - DataAccessSpec:
+                          - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceAsyncClient.update_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [data_attribute, update_mask]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest):
+            request = data_taxonomy.UpdateDataAttributeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if data_attribute is not None:
+            request.data_attribute = data_attribute
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("data_attribute.name", request.data_attribute.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            data_taxonomy.DataAttribute,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_data_attribute(self,
+            request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Deletes a Data Attribute resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_delete_data_attribute():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeleteDataAttributeRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = await client.delete_data_attribute(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]]):
+                The request object. Delete DataAttribute request.
+            name (:class:`str`):
+                Required. The resource name of the DataAttribute:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                empty messages in your APIs. A typical example is to
+                use it as the request or the response type of an API
+                method. For instance::
+
+                    service Foo {
+                      rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+                    }
+
+        """
+        warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
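+        # (Illustrative: a plain dict is also accepted here; the proto-plus
+        # constructor coerces it, e.g. DeleteDataAttributeRequest({"name": "..."}).)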
+        if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest):
+            request = data_taxonomy.DeleteDataAttributeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_data_attributes(self,
+            request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListDataAttributesAsyncPager:
+        r"""Lists Data Attribute resources in a DataTaxonomy.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_list_data_attributes():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.ListDataAttributesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_data_attributes(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]]):
+                The request object. List DataAttributes request.
+            parent (:class:`str`):
+                Required. The resource name of the DataTaxonomy:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager:
+                List DataAttributes response.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
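+
+                (The pager also exposes an async ``pages`` iterator for
+                page-at-a-time access, e.g. ``async for page in
+                page_result.pages``.)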
+ + """ + warnings.warn("DataTaxonomyServiceAsyncClient.list_data_attributes is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributesRequest): + request = data_taxonomy.ListDataAttributesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataAttributesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_attribute(self, + request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_taxonomy.DataAttribute: + r"""Retrieves a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_attribute(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]]): + The request object. Get DataAttribute request. + name (:class:`str`): + Required. 
The resource name of the dataAttribute:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.DataAttribute:
+                Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                DataAttribute resources can be defined in a
+                hierarchy. A single dataAttribute resource can
+                contain specs of multiple types:
+
+                ::
+
+                    PII
+                      - ResourceAccessSpec:
+                          - readers: foo@bar.com
+                      - DataAccessSpec:
+                          - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceAsyncClient.get_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
+            request = data_taxonomy.GetDataAttributeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_operations(
+        self,
+        request: Optional[operations_pb2.ListOperationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operations_pb2.ListOperationsResponse:
+        r"""Lists operations that match the specified filter in the request.
+
+        Args:
+            request (:class:`~.operations_pb2.ListOperationsRequest`):
+                The request object. Request message for
+                `ListOperations` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata.
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
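+        # (Cancellation is best-effort and returns no payload; callers can
+        # confirm the operation's final state afterwards with get_operation.)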
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataTaxonomyServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DataTaxonomyServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py new file mode 100644 index 000000000000..06bf7e455890 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -0,0 +1,2917 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.dataplex_v1.types import security +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataTaxonomyServiceGrpcTransport +from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport +from .transports.rest import DataTaxonomyServiceRestTransport + + +class DataTaxonomyServiceClientMeta(type): + """Metaclass for the DataTaxonomyService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] + _transport_registry["grpc"] = DataTaxonomyServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataTaxonomyServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataTaxonomyServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataTaxonomyServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
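+        # (Illustrative: get_transport_class("rest") returns
+        # DataTaxonomyServiceRestTransport; with no label, the first registry
+        # entry, "grpc", is used.)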
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataTaxonomyServiceClient(metaclass=DataTaxonomyServiceClientMeta):
+    """DataTaxonomyService enables attribute-based governance. The
+    resources currently offered include DataTaxonomy and
+    DataAttribute.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "dataplex.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataTaxonomyServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataTaxonomyServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DataTaxonomyServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataTaxonomyServiceTransport: The transport used by the client
+                instance.
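+
+        For example (illustrative)::
+
+            client = DataTaxonomyServiceClient(transport="rest")
+            assert isinstance(client.transport, DataTaxonomyServiceRestTransport)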
+ """ + return self._transport + + @staticmethod + def data_attribute_path(project: str,location: str,dataTaxonomy: str,data_attribute_id: str,) -> str: + """Returns a fully-qualified data_attribute string.""" + return "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, ) + + @staticmethod + def parse_data_attribute_path(path: str) -> Dict[str,str]: + """Parses a data_attribute path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)/attributes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_attribute_binding_path(project: str,location: str,data_attribute_binding_id: str,) -> str: + """Returns a fully-qualified data_attribute_binding string.""" + return "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, ) + + @staticmethod + def parse_data_attribute_binding_path(path: str) -> Dict[str,str]: + """Parses a data_attribute_binding path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataAttributeBindings/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def data_taxonomy_path(project: str,location: str,data_taxonomy_id: str,) -> str: + """Returns a fully-qualified data_taxonomy string.""" + return "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, ) + + @staticmethod + def parse_data_taxonomy_path(path: str) -> Dict[str,str]: + """Parses a data_taxonomy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its 
component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+                      DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
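+
+        (Illustrative: with GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and
+        GOOGLE_API_USE_MTLS_ENDPOINT="auto", a client that finds a default
+        client certificate will be pointed at the mTLS endpoint.)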
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
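+
+        For example (illustrative)::
+
+            _get_universe_domain(None, None)           # -> "googleapis.com"
+            _get_universe_domain("example.com", None)  # -> "example.com"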
+ """ + universe_domain = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data taxonomy service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataTaxonomyServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataTaxonomyServiceClient._read_environment_variables()
+        self._client_cert_source = DataTaxonomyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = DataTaxonomyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, DataTaxonomyServiceTransport)
+        if transport_provided:
+            # transport is a DataTaxonomyServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(DataTaxonomyServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataTaxonomyServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport]] = ( + DataTaxonomyServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataTaxonomyServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.DataTaxonomyServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "credentialsType": None, + } + ) + + def create_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + data_taxonomy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_create_data_taxonomy():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.CreateDataTaxonomyRequest(
+                    parent="parent_value",
+                    data_taxonomy_id="data_taxonomy_id_value",
+                )
+
+                # Make the request
+                operation = client.create_data_taxonomy(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]):
+                The request object. Create DataTaxonomy request.
+            parent (str):
+                Required. The resource name of the data taxonomy
+                location, of the form:
+                projects/{project_number}/locations/{location_id},
+                where ``location_id`` refers to a GCP region.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy):
+                Required. DataTaxonomy resource.
+                This corresponds to the ``data_taxonomy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_taxonomy_id (str):
+                Required. DataTaxonomy identifier.
+
+                - Must contain only lowercase letters, numbers and
+                  hyphens.
+                - Must start with a letter.
+                - Must be between 1-63 characters.
+                - Must end with a number or a letter.
+                - Must be unique within the Project.
+
+                This corresponds to the ``data_taxonomy_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources,
+                grouped with a common theme, e.g.
+                'SensitiveDataTaxonomy' can have attributes to manage
+                PII data. It is defined at project level.
+
+        """
+        warnings.warn("DataTaxonomyServiceClient.create_data_taxonomy is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, data_taxonomy, data_taxonomy_id]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest):
+            request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if data_taxonomy_id is not None: + request.data_taxonomy_id = data_taxonomy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_data_taxonomy(self, + request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, + *, + data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a DataTaxonomy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateDataTaxonomyRequest( + ) + + # Make the request + operation = client.update_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]): + The request object. Update DataTaxonomy request. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
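The ``metadata`` description above reflects a gRPC rule: values for ordinary keys are strings, while a key ending in ``-bin`` must carry ``bytes``, which gRPC base64-encodes on the wire. A hypothetical illustration (the custom header keys are invented, not ones this API defines):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    metadata = (
        ("x-custom-trace", "trace-123"),        # ordinary key, str value
        ("x-custom-payload-bin", b"\x00\x01"),  # -bin key, bytes value required
    )
    response = client.get_data_taxonomy(
        request=dataplex_v1.GetDataTaxonomyRequest(
            name="projects/p/locations/l/dataTaxonomies/t",  # placeholder
        ),
        metadata=metadata,
    )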
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage + PII data. It is defined at project level. + + """ + warnings.warn("DataTaxonomyServiceClient.update_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_taxonomy, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_taxonomy is not None: + request.data_taxonomy = data_taxonomy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_taxonomy.name", request.data_taxonomy.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_taxonomy.DataTaxonomy, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_taxonomy(self, + request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_taxonomy(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataTaxonomyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_taxonomy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]): + The request object. Delete DataTaxonomy request. + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + warnings.warn("DataTaxonomyServiceClient.delete_data_taxonomy is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): + request = data_taxonomy.DeleteDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
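Every mutating method on this client returns a :class:`google.api_core.operation.Operation` future rather than a finished result. Besides the blocking ``operation.result()`` the samples use, the wait can be bounded and failures inspected; a sketch with a placeholder resource name:

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    operation = client.delete_data_taxonomy(
        name="projects/p/locations/l/dataTaxonomies/t",  # placeholder
    )
    try:
        # result() blocks until the server finishes; the timeout (seconds)
        # bounds the client-side wait without cancelling the operation.
        operation.result(timeout=300)
    except exceptions.GoogleAPICallError as exc:
        print(f"delete failed: {exc}")

    # While running, operation.metadata carries the service's OperationMetadata.
    print(operation.metadata)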
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_taxonomies(self, + request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTaxonomiesPager: + r"""Lists DataTaxonomy resources in a project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_taxonomies(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]): + The request object. List DataTaxonomies request. + parent (str): + Required. The resource name of the DataTaxonomy + location, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager: + List DataTaxonomies response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn("DataTaxonomyServiceClient.list_data_taxonomies is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+        if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest):
+            request = data_taxonomy.ListDataTaxonomiesRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_data_taxonomies]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDataTaxonomiesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_data_taxonomy(self,
+            request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> data_taxonomy.DataTaxonomy:
+        r"""Retrieves a DataTaxonomy resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_data_taxonomy():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetDataTaxonomyRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_data_taxonomy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]):
+                The request object. Get DataTaxonomy request.
+            name (str):
+                Required. The resource name of the DataTaxonomy:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.DataTaxonomy:
+                DataTaxonomy represents a set of
+                hierarchical DataAttributes resources,
+                grouped with a common theme Eg:
+                'SensitiveDataTaxonomy' can have
+                attributes to manage PII data. It is
+                defined at project level.
+
+        """
+        warnings.warn("DataTaxonomyServiceClient.get_data_taxonomy is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
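The ``ListDataTaxonomiesPager`` wrap above is what lets the list samples use a plain ``for`` loop: iteration issues further RPCs as pages are exhausted. When page boundaries matter, the pager's ``pages`` attribute yields whole responses instead; a sketch with placeholder names:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    # Element-wise iteration resolves additional pages transparently.
    for taxonomy in client.list_data_taxonomies(parent="projects/p/locations/l"):
        print(taxonomy.name)

    # Page-wise iteration yields each ListDataTaxonomiesResponse instead
    # (a pager is consumed as it is iterated, hence the fresh call).
    pager = client.list_data_taxonomies(parent="projects/p/locations/l")
    for page in pager.pages:
        print(len(page.data_taxonomies))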
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): + request = data_taxonomy.GetDataTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + data_attribute_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.CreateDataAttributeBindingRequest( + parent="parent_value", + data_attribute_binding_id="data_attribute_binding_id_value", + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.create_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]): + The request object. Create DataAttributeBinding request. + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. DataAttributeBinding + resource. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_binding_id (str): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + + This corresponds to the ``data_attribute_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + warnings.warn("DataTaxonomyServiceClient.create_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_attribute_binding, data_attribute_binding_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): + request = data_taxonomy.CreateDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if data_attribute_binding_id is not None: + request.data_attribute_binding_id = data_attribute_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
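Because every method here opens with ``warnings.warn(..., DeprecationWarning)``, callers can control how visible the deprecation is with the standard ``warnings`` machinery, for example escalating it to an error in a test suite:

.. code-block:: python

    import warnings

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    with warnings.catch_warnings():
        # Turn this client's deprecation notices into exceptions.
        warnings.simplefilter("error", DeprecationWarning)
        try:
            client.get_data_taxonomy(
                name="projects/p/locations/l/dataTaxonomies/t",  # placeholder
            )
        except DeprecationWarning as warning:
            print(f"deprecated call blocked: {warning}")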
+ response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, + *, + data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + data_attribute_binding = dataplex_v1.DataAttributeBinding() + data_attribute_binding.resource = "resource_value" + + request = dataplex_v1.UpdateDataAttributeBindingRequest( + data_attribute_binding=data_attribute_binding, + ) + + # Make the request + operation = client.update_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]): + The request object. Update DataAttributeBinding request. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. Only fields specified in ``update_mask`` are + updated. + + This corresponds to the ``data_attribute_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind + 'CustomerInfo' entity with 'PII' attribute. + + """ + warnings.warn("DataTaxonomyServiceClient.update_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
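The ``update_mask`` parameter documented above follows the usual protobuf ``FieldMask`` contract: only the listed paths are written, and other fields on the passed resource are ignored. A sketch that updates only the description (resource name is a placeholder):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    binding = dataplex_v1.DataAttributeBinding(
        name="projects/p/locations/l/dataAttributeBindings/b",
        description="Binds the PII attribute to customer tables",
    )
    operation = client.update_data_attribute_binding(
        data_attribute_binding=binding,
        # Only `description` is written; every other field is left untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(operation.result())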
+ flattened_params = [data_attribute_binding, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): + request = data_taxonomy.UpdateDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_attribute_binding is not None: + request.data_attribute_binding = data_attribute_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute_binding.name", request.data_attribute_binding.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttributeBinding, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeBindingRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + operation = client.delete_data_attribute_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]): + The request object. Delete DataAttributeBinding request. + name (str): + Required. 
The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + warnings.warn("DataTaxonomyServiceClient.delete_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): + request = data_taxonomy.DeleteDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_attribute_bindings(self, + request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataAttributeBindingsPager: + r"""Lists DataAttributeBinding resources in a project and + location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_attribute_bindings(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributeBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attribute_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]): + The request object. List DataAttributeBindings request. + parent (str): + Required. The resource name of the Location: + projects/{project_number}/locations/{location_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager: + List DataAttributeBindings response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn("DataTaxonomyServiceClient.list_data_attribute_bindings is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): + request = data_taxonomy.ListDataAttributeBindingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_attribute_bindings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
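The ``retry`` parameter documented above takes a :class:`google.api_core.retry.Retry`; supplying one replaces the method's default policy. A sketch of a custom exponential backoff (the numbers are illustrative only):

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core import retry as retries
    from google.cloud import dataplex_v1

    client = dataplex_v1.DataTaxonomyServiceClient()

    custom_retry = retries.Retry(
        predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
        initial=0.25,    # first backoff, in seconds
        maximum=8.0,     # cap on any single backoff
        multiplier=2.0,  # exponential growth factor
        timeout=60.0,    # give up retrying after a minute overall
    )
    page_result = client.list_data_attribute_bindings(
        parent="projects/p/locations/l",  # placeholder
        retry=custom_retry,
        timeout=30.0,
    )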
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataAttributeBindingsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_attribute_binding(self, + request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_taxonomy.DataAttributeBinding: + r"""Retrieves a DataAttributeBinding resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_data_attribute_binding(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataAttributeBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_attribute_binding(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]): + The request object. Get DataAttributeBinding request. + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.DataAttributeBinding: + DataAttributeBinding represents + binding of attributes to resources. Eg: + Bind 'CustomerInfo' entity with 'PII' + attribute. + + """ + warnings.warn("DataTaxonomyServiceClient.get_data_attribute_binding is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): + request = data_taxonomy.GetDataAttributeBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_attribute_binding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_data_attribute(self, + request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_attribute: Optional[data_taxonomy.DataAttribute] = None, + data_attribute_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a DataAttribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateDataAttributeRequest( + parent="parent_value", + data_attribute_id="data_attribute_id_value", + ) + + # Make the request + operation = client.create_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]): + The request object. Create DataAttribute request. + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. DataAttribute resource. + This corresponds to the ``data_attribute`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_attribute_id (str): + Required. DataAttribute identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the DataTaxonomy. + + This corresponds to the ``data_attribute_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                   DataAttribute resources can be defined in a
+                   hierarchy. A single dataAttribute resource can
+                   contain specs of multiple types::
+
+                       PII
+                         - ResourceAccessSpec:
+                             - readers: foo@bar.com
+                         - DataAccessSpec:
+                             - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceClient.create_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, data_attribute, data_attribute_id]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.CreateDataAttributeRequest):
+            request = data_taxonomy.CreateDataAttributeRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if data_attribute is not None:
+            request.data_attribute = data_attribute
+        if data_attribute_id is not None:
+            request.data_attribute_id = data_attribute_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_data_attribute]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            data_taxonomy.DataAttribute,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_data_attribute(self,
+            request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None,
+            *,
+            data_attribute: Optional[data_taxonomy.DataAttribute] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Updates a DataAttribute resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_update_data_attribute():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.UpdateDataAttributeRequest(
+                )
+
+                # Make the request
+                operation = client.update_data_attribute(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]):
+                The request object. Update DataAttribute request.
+            data_attribute (google.cloud.dataplex_v1.types.DataAttribute):
+                Required. Only fields specified in ``update_mask`` are
+                updated.
+
+                This corresponds to the ``data_attribute`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. Mask of fields to update.
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                   DataAttribute resources can be defined in a
+                   hierarchy. A single dataAttribute resource can
+                   contain specs of multiple types::
+
+                       PII
+                         - ResourceAccessSpec:
+                             - readers: foo@bar.com
+                         - DataAccessSpec:
+                             - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceClient.update_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [data_attribute, update_mask]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest):
+            request = data_taxonomy.UpdateDataAttributeRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if data_attribute is not None:
+            request.data_attribute = data_attribute
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
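The ``metadata = tuple(metadata) + (...)`` step just below implements implicit routing: the resource name is folded into an ``x-goog-request-params`` header so the backend can route the request. ``to_grpc_metadata`` is a small, standalone helper; roughly:

.. code-block:: python

    from google.api_core import gapic_v1

    # URL-encodes the pairs into a single
    # ("x-goog-request-params", "data_attribute.name=...") metadata tuple.
    print(gapic_v1.routing_header.to_grpc_metadata(
        (("data_attribute.name",
          "projects/p/locations/l/dataTaxonomies/t/attributes/a"),)  # placeholder
    ))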
+ rpc = self._transport._wrapped_methods[self._transport.update_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("data_attribute.name", request.data_attribute.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + data_taxonomy.DataAttribute, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_data_attribute(self, + request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_data_attribute(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataAttributeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_attribute(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]): + The request object. Delete DataAttribute request. + name (str): + Required. The resource name of the DataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + warnings.warn("DataTaxonomyServiceClient.delete_data_attribute is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): + request = data_taxonomy.DeleteDataAttributeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_attributes(self, + request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataAttributesPager: + r"""Lists Data Attribute resources in a DataTaxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_data_attributes(): + # Create a client + client = dataplex_v1.DataTaxonomyServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataAttributesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_attributes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]): + The request object. List DataAttributes request. + parent (str): + Required. 
The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager: + List DataAttributes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn("DataTaxonomyServiceClient.list_data_attributes is deprecated", + DeprecationWarning) + + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_taxonomy.ListDataAttributesRequest): + request = data_taxonomy.ListDataAttributesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataAttributesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_attribute(self, + request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_taxonomy.DataAttribute: + r"""Retrieves a Data Attribute resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_data_attribute():
+                # Create a client
+                client = dataplex_v1.DataTaxonomyServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetDataAttributeRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_data_attribute(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]):
+                The request object. Get DataAttribute request.
+            name (str):
+                Required. The resource name of the dataAttribute:
+                projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.DataAttribute:
+                Denotes one dataAttribute in a dataTaxonomy, for example, PII.
+                DataAttribute resources can be defined in a
+                hierarchy. A single dataAttribute resource can
+                contain specs of multiple types
+
+                ::
+
+                    PII
+                      - ResourceAccessSpec:
+                          - readers: foo@bar.com
+                      - DataAccessSpec:
+                          - readers: bar@foo.com
+
+        """
+        warnings.warn("DataTaxonomyServiceClient.get_data_attribute is deprecated",
+                      DeprecationWarning)
+
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, data_taxonomy.GetDataAttributeRequest):
+            request = data_taxonomy.GetDataAttributeRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_data_attribute]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "DataTaxonomyServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+ + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
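+        # For example, both of the following calls (resource name purely
+        # illustrative) arrive here as an equivalent request:
+        #     client.get_operation({"name": "projects/p/locations/l/operations/o"})
+        #     client.get_operation(operations_pb2.GetOperationRequest(
+        #         name="projects/p/locations/l/operations/o"))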
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "DataTaxonomyServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py new file mode 100644 index 000000000000..1b187755306b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import data_taxonomy + + +class ListDataTaxonomiesPager: + """A pager for iterating through ``list_data_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_taxonomies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataTaxonomies`` requests and continue to iterate + through the ``data_taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataTaxonomiesResponse], + request: data_taxonomy.ListDataTaxonomiesRequest, + response: data_taxonomy.ListDataTaxonomiesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = data_taxonomy.ListDataTaxonomiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataTaxonomy]: + for page in self.pages: + yield from page.data_taxonomies + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataTaxonomiesAsyncPager: + """A pager for iterating through ``list_data_taxonomies`` requests. 
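+
+    A minimal usage sketch (illustrative; the async service client normally
+    constructs this pager for you):
+
+    .. code-block:: python
+
+        page_result = await client.list_data_taxonomies(request=request)
+        async for data_taxonomy in page_result:
+            print(data_taxonomy.name)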
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_taxonomies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataTaxonomies`` requests and continue to iterate + through the ``data_taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataTaxonomiesResponse]], + request: data_taxonomy.ListDataTaxonomiesRequest, + response: data_taxonomy.ListDataTaxonomiesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = data_taxonomy.ListDataTaxonomiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataTaxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.data_taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributeBindingsPager: + """A pager for iterating through ``list_data_attribute_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_attribute_bindings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataAttributeBindings`` requests and continue to iterate + through the ``data_attribute_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataAttributeBindingsResponse], + request: data_taxonomy.ListDataAttributeBindingsRequest, + response: data_taxonomy.ListDataAttributeBindingsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataAttributeBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataAttributeBinding]: + for page in self.pages: + yield from page.data_attribute_bindings + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributeBindingsAsyncPager: + """A pager for iterating through ``list_data_attribute_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_attribute_bindings`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataAttributeBindings`` requests and continue to iterate + through the ``data_attribute_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]], + request: data_taxonomy.ListDataAttributeBindingsRequest, + response: data_taxonomy.ListDataAttributeBindingsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributeBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttributeBinding]: + async def async_generator(): + async for page in self.pages: + for response in page.data_attribute_bindings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributesPager: + """A pager for iterating through ``list_data_attributes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_attributes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataAttributes`` requests and continue to iterate + through the ``data_attributes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., data_taxonomy.ListDataAttributesResponse], + request: data_taxonomy.ListDataAttributesRequest, + response: data_taxonomy.ListDataAttributesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
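+
+        In normal use this pager is returned by
+        ``DataTaxonomyServiceClient.list_data_attributes`` rather than built
+        directly; iterating it fetches further pages on demand. A minimal
+        sketch (``parent`` is an illustrative resource name):
+
+        .. code-block:: python
+
+            for data_attribute in client.list_data_attributes(parent=parent):
+                print(data_attribute.name)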
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_taxonomy.ListDataAttributesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_taxonomy.DataAttribute]: + for page in self.pages: + yield from page.data_attributes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDataAttributesAsyncPager: + """A pager for iterating through ``list_data_attributes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_attributes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataAttributes`` requests and continue to iterate + through the ``data_attributes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[data_taxonomy.ListDataAttributesResponse]], + request: data_taxonomy.ListDataAttributesRequest, + response: data_taxonomy.ListDataAttributesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = data_taxonomy.ListDataAttributesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttribute]: + async def async_generator(): + async for page in self.pages: + for response in page.data_attributes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst new file mode 100644 index 000000000000..5c194fc01362 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataTaxonomyServiceTransport` is the ABC for all transports. +- public child `DataTaxonomyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataTaxonomyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataTaxonomyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DataTaxonomyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py new file mode 100644 index 000000000000..4e06f5ff1989 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataTaxonomyServiceTransport +from .grpc import DataTaxonomyServiceGrpcTransport +from .grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport +from .rest import DataTaxonomyServiceRestTransport +from .rest import DataTaxonomyServiceRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] +_transport_registry['grpc'] = DataTaxonomyServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataTaxonomyServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DataTaxonomyServiceRestTransport + +__all__ = ( + 'DataTaxonomyServiceTransport', + 'DataTaxonomyServiceGrpcTransport', + 'DataTaxonomyServiceGrpcAsyncIOTransport', + 'DataTaxonomyServiceRestTransport', + 'DataTaxonomyServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py new file mode 100644 index 000000000000..2a76beefdeac --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataTaxonomyServiceTransport(abc.ABC): + """Abstract transport class for DataTaxonomyService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
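+        # Resolution order: an explicit ``credentials`` object is used as-is,
+        # a ``credentials_file`` is loaded from disk, and otherwise Application
+        # Default Credentials are resolved from the environment; passing both
+        # arguments raises ``DuplicateCredentialArgs`` below.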
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_data_taxonomy: gapic_v1.method.wrap_method( + self.create_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_data_taxonomy: gapic_v1.method.wrap_method( + self.update_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_taxonomy: gapic_v1.method.wrap_method( + self.delete_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_data_taxonomies: gapic_v1.method.wrap_method( + self.list_data_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_data_taxonomy: gapic_v1.method.wrap_method( + self.get_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute_binding: gapic_v1.method.wrap_method( + self.create_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute_binding: gapic_v1.method.wrap_method( + self.update_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute_binding: gapic_v1.method.wrap_method( + self.delete_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attribute_bindings: gapic_v1.method.wrap_method( + self.list_data_attribute_bindings, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute_binding: gapic_v1.method.wrap_method( + self.get_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute: gapic_v1.method.wrap_method( + self.create_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute: gapic_v1.method.wrap_method( + self.update_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute: gapic_v1.method.wrap_method( + self.delete_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attributes: gapic_v1.method.wrap_method( + self.list_data_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute: gapic_v1.method.wrap_method( + self.get_data_attribute, + default_timeout=None, + 
client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + Union[ + data_taxonomy.ListDataTaxonomiesResponse, + Awaitable[data_taxonomy.ListDataTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + Union[ + data_taxonomy.DataTaxonomy, + Awaitable[data_taxonomy.DataTaxonomy] + ]]: + raise NotImplementedError() + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + Union[ + data_taxonomy.ListDataAttributeBindingsResponse, + Awaitable[data_taxonomy.ListDataAttributeBindingsResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + Union[ + data_taxonomy.DataAttributeBinding, + Awaitable[data_taxonomy.DataAttributeBinding] + ]]: + raise 
NotImplementedError() + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + Union[ + data_taxonomy.ListDataAttributesResponse, + Awaitable[data_taxonomy.ListDataAttributesResponse] + ]]: + raise NotImplementedError() + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + Union[ + data_taxonomy.DataAttribute, + Awaitable[data_taxonomy.DataAttribute] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataTaxonomyServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py new file mode 100644 index 000000000000..28127ba1c292 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py @@ -0,0 +1,849 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.dataplex_v1.types import data_taxonomy
+from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str key/value pairs.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DataTaxonomyServiceGrpcTransport(DataTaxonomyServiceTransport):
+    """gRPC backend transport for DataTaxonomyService.
+ + DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data taxonomy method over gRPC. + + Create a DataTaxonomy resource. + + Returns: + Callable[[~.CreateDataTaxonomyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_taxonomy' not in self._stubs: + self._stubs['create_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', + request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_taxonomy'] + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data taxonomy method over gRPC. + + Updates a DataTaxonomy resource. + + Returns: + Callable[[~.UpdateDataTaxonomyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
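+        # Note the codec pairing below: the request is encoded with the
+        # proto-plus ``serialize`` classmethod, while the response is a raw
+        # protobuf decoded with ``operations_pb2.Operation.FromString``,
+        # because this RPC returns a long-running operation rather than a
+        # proto-plus message.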
+ if 'update_data_taxonomy' not in self._stubs: + self._stubs['update_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', + request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_taxonomy'] + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data taxonomy method over gRPC. + + Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + Returns: + Callable[[~.DeleteDataTaxonomyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_taxonomy' not in self._stubs: + self._stubs['delete_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', + request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_taxonomy'] + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + data_taxonomy.ListDataTaxonomiesResponse]: + r"""Return a callable for the list data taxonomies method over gRPC. + + Lists DataTaxonomy resources in a project and + location. + + Returns: + Callable[[~.ListDataTaxonomiesRequest], + ~.ListDataTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_taxonomies' not in self._stubs: + self._stubs['list_data_taxonomies'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', + request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, + response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, + ) + return self._stubs['list_data_taxonomies'] + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + data_taxonomy.DataTaxonomy]: + r"""Return a callable for the get data taxonomy method over gRPC. + + Retrieves a DataTaxonomy resource. + + Returns: + Callable[[~.GetDataTaxonomyRequest], + ~.DataTaxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
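+        # ``self._stubs`` memoizes the callable: the first access to this
+        # property builds the stub over ``self._logged_channel``, and every
+        # later access returns the same cached object.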
+ if 'get_data_taxonomy' not in self._stubs: + self._stubs['get_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', + request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, + response_deserializer=data_taxonomy.DataTaxonomy.deserialize, + ) + return self._stubs['get_data_taxonomy'] + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data attribute binding method over gRPC. + + Create a DataAttributeBinding resource. + + Returns: + Callable[[~.CreateDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute_binding' not in self._stubs: + self._stubs['create_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', + request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute_binding'] + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data attribute binding method over gRPC. + + Updates a DataAttributeBinding resource. + + Returns: + Callable[[~.UpdateDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute_binding' not in self._stubs: + self._stubs['update_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', + request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute_binding'] + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data attribute binding method over gRPC. + + Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + Returns: + Callable[[~.DeleteDataAttributeBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
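+        # A hedged usage sketch, not part of the generated surface
+        # (``transport`` and the request contents are assumptions):
+        #
+        #   op = transport.delete_data_attribute_binding(
+        #       data_taxonomy.DeleteDataAttributeBindingRequest(name=...))
+        #   # ``op`` is a google.longrunning Operation; completion can be
+        #   # tracked through ``transport.operations_client``.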
+ if 'delete_data_attribute_binding' not in self._stubs: + self._stubs['delete_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', + request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute_binding'] + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + data_taxonomy.ListDataAttributeBindingsResponse]: + r"""Return a callable for the list data attribute bindings method over gRPC. + + Lists DataAttributeBinding resources in a project and + location. + + Returns: + Callable[[~.ListDataAttributeBindingsRequest], + ~.ListDataAttributeBindingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attribute_bindings' not in self._stubs: + self._stubs['list_data_attribute_bindings'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', + request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, + ) + return self._stubs['list_data_attribute_bindings'] + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + data_taxonomy.DataAttributeBinding]: + r"""Return a callable for the get data attribute binding method over gRPC. + + Retrieves a DataAttributeBinding resource. + + Returns: + Callable[[~.GetDataAttributeBindingRequest], + ~.DataAttributeBinding]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_attribute_binding' not in self._stubs: + self._stubs['get_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', + request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, + response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, + ) + return self._stubs['get_data_attribute_binding'] + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the create data attribute method over gRPC. + + Create a DataAttribute resource. + + Returns: + Callable[[~.CreateDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
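+        # Hedged sketch of invoking the returned callable directly; the field
+        # names follow the usual Create-request pattern and are assumptions:
+        #
+        #   operation = transport.create_data_attribute(
+        #       data_taxonomy.CreateDataAttributeRequest(
+        #           parent=..., data_attribute_id=..., data_attribute=...))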
+ if 'create_data_attribute' not in self._stubs: + self._stubs['create_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', + request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute'] + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the update data attribute method over gRPC. + + Updates a DataAttribute resource. + + Returns: + Callable[[~.UpdateDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute' not in self._stubs: + self._stubs['update_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', + request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute'] + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete data attribute method over gRPC. + + Deletes a Data Attribute resource. + + Returns: + Callable[[~.DeleteDataAttributeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_attribute' not in self._stubs: + self._stubs['delete_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', + request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute'] + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + data_taxonomy.ListDataAttributesResponse]: + r"""Return a callable for the list data attributes method over gRPC. + + Lists Data Attribute resources in a DataTaxonomy. + + Returns: + Callable[[~.ListDataAttributesRequest], + ~.ListDataAttributesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attributes' not in self._stubs: + self._stubs['list_data_attributes'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', + request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, + ) + return self._stubs['list_data_attributes'] + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + data_taxonomy.DataAttribute]: + r"""Return a callable for the get data attribute method over gRPC. 
+ + Retrieves a Data Attribute resource. + + Returns: + Callable[[~.GetDataAttributeRequest], + ~.DataAttribute]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_attribute' not in self._stubs: + self._stubs['get_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', + request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, + response_deserializer=data_taxonomy.DataAttribute.deserialize, + ) + return self._stubs['get_data_attribute'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
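+        # The operations methods in this block are mixins that reuse the same
+        # logged channel but target the generic
+        # ``google.longrunning.Operations`` service instead of
+        # DataTaxonomyService itself.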
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'DataTaxonomyServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..cc240458b4cd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py
@@ -0,0 +1,970 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DataTaxonomyServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": str(client_call_details.method), + 
"response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DataTaxonomyServiceGrpcAsyncIOTransport(DataTaxonomyServiceTransport): + """gRPC AsyncIO backend transport for DataTaxonomyService. + + DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
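+                # ``client_cert_source`` must return a ``(cert_bytes, key_bytes)``
+                # tuple in PEM format. A hypothetical callback (illustrative
+                # only, not part of this module) could read both from disk:
+                #
+                #   def client_cert_source():
+                #       with open("client.pem", "rb") as c, open("client.key", "rb") as k:
+                #           return c.read(), k.read()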
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data taxonomy method over gRPC. + + Create a DataTaxonomy resource. + + Returns: + Callable[[~.CreateDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
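+        # Hedged sketch: on this AsyncIO transport the returned callable
+        # produces an awaitable, so inside a coroutine one would write
+        # ``operation = await transport.create_data_taxonomy(request)``
+        # (``transport`` and ``request`` here are assumptions, not names
+        # defined by this module).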
+ if 'create_data_taxonomy' not in self._stubs: + self._stubs['create_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', + request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_taxonomy'] + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data taxonomy method over gRPC. + + Updates a DataTaxonomy resource. + + Returns: + Callable[[~.UpdateDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_taxonomy' not in self._stubs: + self._stubs['update_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', + request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_taxonomy'] + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data taxonomy method over gRPC. + + Deletes a DataTaxonomy resource. All attributes + within the DataTaxonomy must be deleted before the + DataTaxonomy can be deleted. + + Returns: + Callable[[~.DeleteDataTaxonomyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_taxonomy' not in self._stubs: + self._stubs['delete_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', + request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_taxonomy'] + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + Awaitable[data_taxonomy.ListDataTaxonomiesResponse]]: + r"""Return a callable for the list data taxonomies method over gRPC. + + Lists DataTaxonomy resources in a project and + location. + + Returns: + Callable[[~.ListDataTaxonomiesRequest], + Awaitable[~.ListDataTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
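+        # Hedged transport-level pagination sketch, assuming the standard
+        # ``page_token``/``next_page_token`` List-request fields:
+        #
+        #   resp = await transport.list_data_taxonomies(request)
+        #   while resp.next_page_token:
+        #       request.page_token = resp.next_page_token
+        #       resp = await transport.list_data_taxonomies(request)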
+ if 'list_data_taxonomies' not in self._stubs: + self._stubs['list_data_taxonomies'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', + request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, + response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, + ) + return self._stubs['list_data_taxonomies'] + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + Awaitable[data_taxonomy.DataTaxonomy]]: + r"""Return a callable for the get data taxonomy method over gRPC. + + Retrieves a DataTaxonomy resource. + + Returns: + Callable[[~.GetDataTaxonomyRequest], + Awaitable[~.DataTaxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_data_taxonomy' not in self._stubs: + self._stubs['get_data_taxonomy'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', + request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, + response_deserializer=data_taxonomy.DataTaxonomy.deserialize, + ) + return self._stubs['get_data_taxonomy'] + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data attribute binding method over gRPC. + + Create a DataAttributeBinding resource. + + Returns: + Callable[[~.CreateDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute_binding' not in self._stubs: + self._stubs['create_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', + request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute_binding'] + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data attribute binding method over gRPC. + + Updates a DataAttributeBinding resource. + + Returns: + Callable[[~.UpdateDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
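+        # Update requests in this service conventionally carry the resource
+        # plus a ``google.protobuf.FieldMask``; a hedged sketch in which the
+        # masked path is an assumption and ``field_mask_pb2``
+        # (``google.protobuf.field_mask_pb2``) is not imported by this module:
+        #
+        #   request = data_taxonomy.UpdateDataAttributeBindingRequest(
+        #       data_attribute_binding=binding,
+        #       update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        #   )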
+ if 'update_data_attribute_binding' not in self._stubs: + self._stubs['update_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', + request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute_binding'] + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data attribute binding method over gRPC. + + Deletes a DataAttributeBinding resource. All + attributes within the DataAttributeBinding must be + deleted before the DataAttributeBinding can be deleted. + + Returns: + Callable[[~.DeleteDataAttributeBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_data_attribute_binding' not in self._stubs: + self._stubs['delete_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', + request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute_binding'] + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]]: + r"""Return a callable for the list data attribute bindings method over gRPC. + + Lists DataAttributeBinding resources in a project and + location. + + Returns: + Callable[[~.ListDataAttributeBindingsRequest], + Awaitable[~.ListDataAttributeBindingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attribute_bindings' not in self._stubs: + self._stubs['list_data_attribute_bindings'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', + request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, + ) + return self._stubs['list_data_attribute_bindings'] + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + Awaitable[data_taxonomy.DataAttributeBinding]]: + r"""Return a callable for the get data attribute binding method over gRPC. + + Retrieves a DataAttributeBinding resource. + + Returns: + Callable[[~.GetDataAttributeBindingRequest], + Awaitable[~.DataAttributeBinding]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
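+        # Unlike the Operation-returning RPCs, both directions of this stub
+        # use proto-plus codecs: ``serialize`` for the request and
+        # ``deserialize`` for the ``DataAttributeBinding`` response.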
+ if 'get_data_attribute_binding' not in self._stubs: + self._stubs['get_data_attribute_binding'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', + request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, + response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, + ) + return self._stubs['get_data_attribute_binding'] + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create data attribute method over gRPC. + + Create a DataAttribute resource. + + Returns: + Callable[[~.CreateDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_data_attribute' not in self._stubs: + self._stubs['create_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', + request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_data_attribute'] + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update data attribute method over gRPC. + + Updates a DataAttribute resource. + + Returns: + Callable[[~.UpdateDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_data_attribute' not in self._stubs: + self._stubs['update_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', + request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_data_attribute'] + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete data attribute method over gRPC. + + Deletes a Data Attribute resource. + + Returns: + Callable[[~.DeleteDataAttributeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
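+        # Hedged note: the awaited result is a raw longrunning Operation;
+        # polling it to completion happens separately, e.g. through
+        # ``transport.operations_client`` (an ``OperationsAsyncClient``).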
+ if 'delete_data_attribute' not in self._stubs: + self._stubs['delete_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', + request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_data_attribute'] + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + Awaitable[data_taxonomy.ListDataAttributesResponse]]: + r"""Return a callable for the list data attributes method over gRPC. + + Lists Data Attribute resources in a DataTaxonomy. + + Returns: + Callable[[~.ListDataAttributesRequest], + Awaitable[~.ListDataAttributesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_data_attributes' not in self._stubs: + self._stubs['list_data_attributes'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', + request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, + response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, + ) + return self._stubs['list_data_attributes'] + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + Awaitable[data_taxonomy.DataAttribute]]: + r"""Return a callable for the get data attribute method over gRPC. + + Retrieves a Data Attribute resource. + + Returns: + Callable[[~.GetDataAttributeRequest], + Awaitable[~.DataAttribute]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_data_attribute' not in self._stubs: + self._stubs['get_data_attribute'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', + request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, + response_deserializer=data_taxonomy.DataAttribute.deserialize, + ) + return self._stubs['get_data_attribute'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_taxonomy: self._wrap_method( + self.create_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_data_taxonomy: self._wrap_method( + self.update_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_taxonomy: self._wrap_method( + self.delete_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_data_taxonomies: self._wrap_method( + self.list_data_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_data_taxonomy: self._wrap_method( + self.get_data_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute_binding: self._wrap_method( + self.create_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute_binding: self._wrap_method( + self.update_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute_binding: self._wrap_method( + self.delete_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attribute_bindings: self._wrap_method( + self.list_data_attribute_bindings, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute_binding: self._wrap_method( + self.get_data_attribute_binding, + default_timeout=None, + client_info=client_info, + ), + self.create_data_attribute: self._wrap_method( + self.create_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.update_data_attribute: self._wrap_method( + self.update_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_attribute: self._wrap_method( + self.delete_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.list_data_attributes: self._wrap_method( + self.list_data_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_data_attribute: self._wrap_method( + self.get_data_attribute, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def 
close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
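+        # Hypothetical usage sketch (names assumed, not generated code):
+        # paging through locations with this callable on an instantiated
+        # DataTaxonomyServiceGrpcAsyncIOTransport:
+        #
+        #     request = locations_pb2.ListLocationsRequest(name="projects/my-project")
+        #     response = await transport.list_locations(request)
+        #     for location in response.locations:
+        #         ...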
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'DataTaxonomyServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py new file mode 100644 index 000000000000..34e996b419df --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py @@ -0,0 +1,3660 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseDataTaxonomyServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataTaxonomyServiceRestInterceptor: + """Interceptor for DataTaxonomyService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataTaxonomyServiceRestTransport. + + .. 
code-block:: python + class MyCustomDataTaxonomyServiceInterceptor(DataTaxonomyServiceRestInterceptor): + def pre_create_data_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_data_attribute_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_attribute_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_data_taxonomy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_taxonomy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_data_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_attribute_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_data_attribute_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_taxonomy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_data_taxonomy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_attribute_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_attribute_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_taxonomy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_taxonomy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_attribute_bindings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_attribute_bindings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_attributes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_attributes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_taxonomies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_taxonomies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_attribute(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_attribute(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_attribute_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_update_data_attribute_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_taxonomy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_taxonomy(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DataTaxonomyServiceRestTransport(interceptor=MyCustomDataTaxonomyServiceInterceptor()) + client = DataTaxonomyServiceClient(transport=transport) + + + """ + def pre_create_data_attribute(self, request: data_taxonomy.CreateDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.CreateDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_data_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_create_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_data_attribute + + DEPRECATED. Please use the `post_create_data_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_create_data_attribute` interceptor runs + before the `post_create_data_attribute_with_metadata` interceptor. + """ + return response + + def post_create_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_create_data_attribute_with_metadata` + interceptor in new development instead of the `post_create_data_attribute` interceptor. + When both interceptors are used, this `post_create_data_attribute_with_metadata` interceptor runs after the + `post_create_data_attribute` interceptor. The (possibly modified) response returned by + `post_create_data_attribute` will be passed to + `post_create_data_attribute_with_metadata`. + """ + return response, metadata + + def pre_create_data_attribute_binding(self, request: data_taxonomy.CreateDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.CreateDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_data_attribute_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_create_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_data_attribute_binding + + DEPRECATED. Please use the `post_create_data_attribute_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_create_data_attribute_binding` interceptor runs + before the `post_create_data_attribute_binding_with_metadata` interceptor. 
+ """ + return response + + def post_create_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_attribute_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_create_data_attribute_binding_with_metadata` + interceptor in new development instead of the `post_create_data_attribute_binding` interceptor. + When both interceptors are used, this `post_create_data_attribute_binding_with_metadata` interceptor runs after the + `post_create_data_attribute_binding` interceptor. The (possibly modified) response returned by + `post_create_data_attribute_binding` will be passed to + `post_create_data_attribute_binding_with_metadata`. + """ + return response, metadata + + def pre_create_data_taxonomy(self, request: gcd_data_taxonomy.CreateDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_data_taxonomy.CreateDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_data_taxonomy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_create_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_data_taxonomy + + DEPRECATED. Please use the `post_create_data_taxonomy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_create_data_taxonomy` interceptor runs + before the `post_create_data_taxonomy_with_metadata` interceptor. + """ + return response + + def post_create_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_taxonomy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_create_data_taxonomy_with_metadata` + interceptor in new development instead of the `post_create_data_taxonomy` interceptor. + When both interceptors are used, this `post_create_data_taxonomy_with_metadata` interceptor runs after the + `post_create_data_taxonomy` interceptor. The (possibly modified) response returned by + `post_create_data_taxonomy` will be passed to + `post_create_data_taxonomy_with_metadata`. + """ + return response, metadata + + def pre_delete_data_attribute(self, request: data_taxonomy.DeleteDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_data_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. 
+ """ + return request, metadata + + def post_delete_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_attribute + + DEPRECATED. Please use the `post_delete_data_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_delete_data_attribute` interceptor runs + before the `post_delete_data_attribute_with_metadata` interceptor. + """ + return response + + def post_delete_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_delete_data_attribute_with_metadata` + interceptor in new development instead of the `post_delete_data_attribute` interceptor. + When both interceptors are used, this `post_delete_data_attribute_with_metadata` interceptor runs after the + `post_delete_data_attribute` interceptor. The (possibly modified) response returned by + `post_delete_data_attribute` will be passed to + `post_delete_data_attribute_with_metadata`. + """ + return response, metadata + + def pre_delete_data_attribute_binding(self, request: data_taxonomy.DeleteDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_data_attribute_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_delete_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_attribute_binding + + DEPRECATED. Please use the `post_delete_data_attribute_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_delete_data_attribute_binding` interceptor runs + before the `post_delete_data_attribute_binding_with_metadata` interceptor. + """ + return response + + def post_delete_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_attribute_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_delete_data_attribute_binding_with_metadata` + interceptor in new development instead of the `post_delete_data_attribute_binding` interceptor. + When both interceptors are used, this `post_delete_data_attribute_binding_with_metadata` interceptor runs after the + `post_delete_data_attribute_binding` interceptor. 
The (possibly modified) response returned by + `post_delete_data_attribute_binding` will be passed to + `post_delete_data_attribute_binding_with_metadata`. + """ + return response, metadata + + def pre_delete_data_taxonomy(self, request: data_taxonomy.DeleteDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_data_taxonomy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_delete_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_taxonomy + + DEPRECATED. Please use the `post_delete_data_taxonomy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_delete_data_taxonomy` interceptor runs + before the `post_delete_data_taxonomy_with_metadata` interceptor. + """ + return response + + def post_delete_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_taxonomy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_delete_data_taxonomy_with_metadata` + interceptor in new development instead of the `post_delete_data_taxonomy` interceptor. + When both interceptors are used, this `post_delete_data_taxonomy_with_metadata` interceptor runs after the + `post_delete_data_taxonomy` interceptor. The (possibly modified) response returned by + `post_delete_data_taxonomy` will be passed to + `post_delete_data_taxonomy_with_metadata`. + """ + return response, metadata + + def pre_get_data_attribute(self, request: data_taxonomy.GetDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_get_data_attribute(self, response: data_taxonomy.DataAttribute) -> data_taxonomy.DataAttribute: + """Post-rpc interceptor for get_data_attribute + + DEPRECATED. Please use the `post_get_data_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_get_data_attribute` interceptor runs + before the `post_get_data_attribute_with_metadata` interceptor. + """ + return response + + def post_get_data_attribute_with_metadata(self, response: data_taxonomy.DataAttribute, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataAttribute, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. 
+ + We recommend only using this `post_get_data_attribute_with_metadata` + interceptor in new development instead of the `post_get_data_attribute` interceptor. + When both interceptors are used, this `post_get_data_attribute_with_metadata` interceptor runs after the + `post_get_data_attribute` interceptor. The (possibly modified) response returned by + `post_get_data_attribute` will be passed to + `post_get_data_attribute_with_metadata`. + """ + return response, metadata + + def pre_get_data_attribute_binding(self, request: data_taxonomy.GetDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_attribute_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_get_data_attribute_binding(self, response: data_taxonomy.DataAttributeBinding) -> data_taxonomy.DataAttributeBinding: + """Post-rpc interceptor for get_data_attribute_binding + + DEPRECATED. Please use the `post_get_data_attribute_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_get_data_attribute_binding` interceptor runs + before the `post_get_data_attribute_binding_with_metadata` interceptor. + """ + return response + + def post_get_data_attribute_binding_with_metadata(self, response: data_taxonomy.DataAttributeBinding, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataAttributeBinding, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_attribute_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_get_data_attribute_binding_with_metadata` + interceptor in new development instead of the `post_get_data_attribute_binding` interceptor. + When both interceptors are used, this `post_get_data_attribute_binding_with_metadata` interceptor runs after the + `post_get_data_attribute_binding` interceptor. The (possibly modified) response returned by + `post_get_data_attribute_binding` will be passed to + `post_get_data_attribute_binding_with_metadata`. + """ + return response, metadata + + def pre_get_data_taxonomy(self, request: data_taxonomy.GetDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_taxonomy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_get_data_taxonomy(self, response: data_taxonomy.DataTaxonomy) -> data_taxonomy.DataTaxonomy: + """Post-rpc interceptor for get_data_taxonomy + + DEPRECATED. Please use the `post_get_data_taxonomy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_get_data_taxonomy` interceptor runs + before the `post_get_data_taxonomy_with_metadata` interceptor. 
+ """ + return response + + def post_get_data_taxonomy_with_metadata(self, response: data_taxonomy.DataTaxonomy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataTaxonomy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_taxonomy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_get_data_taxonomy_with_metadata` + interceptor in new development instead of the `post_get_data_taxonomy` interceptor. + When both interceptors are used, this `post_get_data_taxonomy_with_metadata` interceptor runs after the + `post_get_data_taxonomy` interceptor. The (possibly modified) response returned by + `post_get_data_taxonomy` will be passed to + `post_get_data_taxonomy_with_metadata`. + """ + return response, metadata + + def pre_list_data_attribute_bindings(self, request: data_taxonomy.ListDataAttributeBindingsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributeBindingsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_data_attribute_bindings + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_list_data_attribute_bindings(self, response: data_taxonomy.ListDataAttributeBindingsResponse) -> data_taxonomy.ListDataAttributeBindingsResponse: + """Post-rpc interceptor for list_data_attribute_bindings + + DEPRECATED. Please use the `post_list_data_attribute_bindings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_list_data_attribute_bindings` interceptor runs + before the `post_list_data_attribute_bindings_with_metadata` interceptor. + """ + return response + + def post_list_data_attribute_bindings_with_metadata(self, response: data_taxonomy.ListDataAttributeBindingsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributeBindingsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_data_attribute_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_list_data_attribute_bindings_with_metadata` + interceptor in new development instead of the `post_list_data_attribute_bindings` interceptor. + When both interceptors are used, this `post_list_data_attribute_bindings_with_metadata` interceptor runs after the + `post_list_data_attribute_bindings` interceptor. The (possibly modified) response returned by + `post_list_data_attribute_bindings` will be passed to + `post_list_data_attribute_bindings_with_metadata`. + """ + return response, metadata + + def pre_list_data_attributes(self, request: data_taxonomy.ListDataAttributesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_data_attributes + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. 
+ """ + return request, metadata + + def post_list_data_attributes(self, response: data_taxonomy.ListDataAttributesResponse) -> data_taxonomy.ListDataAttributesResponse: + """Post-rpc interceptor for list_data_attributes + + DEPRECATED. Please use the `post_list_data_attributes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_list_data_attributes` interceptor runs + before the `post_list_data_attributes_with_metadata` interceptor. + """ + return response + + def post_list_data_attributes_with_metadata(self, response: data_taxonomy.ListDataAttributesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_data_attributes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_list_data_attributes_with_metadata` + interceptor in new development instead of the `post_list_data_attributes` interceptor. + When both interceptors are used, this `post_list_data_attributes_with_metadata` interceptor runs after the + `post_list_data_attributes` interceptor. The (possibly modified) response returned by + `post_list_data_attributes` will be passed to + `post_list_data_attributes_with_metadata`. + """ + return response, metadata + + def pre_list_data_taxonomies(self, request: data_taxonomy.ListDataTaxonomiesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataTaxonomiesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_data_taxonomies + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_list_data_taxonomies(self, response: data_taxonomy.ListDataTaxonomiesResponse) -> data_taxonomy.ListDataTaxonomiesResponse: + """Post-rpc interceptor for list_data_taxonomies + + DEPRECATED. Please use the `post_list_data_taxonomies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_list_data_taxonomies` interceptor runs + before the `post_list_data_taxonomies_with_metadata` interceptor. + """ + return response + + def post_list_data_taxonomies_with_metadata(self, response: data_taxonomy.ListDataTaxonomiesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataTaxonomiesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_data_taxonomies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_list_data_taxonomies_with_metadata` + interceptor in new development instead of the `post_list_data_taxonomies` interceptor. + When both interceptors are used, this `post_list_data_taxonomies_with_metadata` interceptor runs after the + `post_list_data_taxonomies` interceptor. The (possibly modified) response returned by + `post_list_data_taxonomies` will be passed to + `post_list_data_taxonomies_with_metadata`. 
+ """ + return response, metadata + + def pre_update_data_attribute(self, request: data_taxonomy.UpdateDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.UpdateDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_data_attribute + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_update_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_attribute + + DEPRECATED. Please use the `post_update_data_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_update_data_attribute` interceptor runs + before the `post_update_data_attribute_with_metadata` interceptor. + """ + return response + + def post_update_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_update_data_attribute_with_metadata` + interceptor in new development instead of the `post_update_data_attribute` interceptor. + When both interceptors are used, this `post_update_data_attribute_with_metadata` interceptor runs after the + `post_update_data_attribute` interceptor. The (possibly modified) response returned by + `post_update_data_attribute` will be passed to + `post_update_data_attribute_with_metadata`. + """ + return response, metadata + + def pre_update_data_attribute_binding(self, request: data_taxonomy.UpdateDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.UpdateDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_data_attribute_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_update_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_attribute_binding + + DEPRECATED. Please use the `post_update_data_attribute_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_update_data_attribute_binding` interceptor runs + before the `post_update_data_attribute_binding_with_metadata` interceptor. + """ + return response + + def post_update_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_attribute_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. 
+ + We recommend only using this `post_update_data_attribute_binding_with_metadata` + interceptor in new development instead of the `post_update_data_attribute_binding` interceptor. + When both interceptors are used, this `post_update_data_attribute_binding_with_metadata` interceptor runs after the + `post_update_data_attribute_binding` interceptor. The (possibly modified) response returned by + `post_update_data_attribute_binding` will be passed to + `post_update_data_attribute_binding_with_metadata`. + """ + return response, metadata + + def pre_update_data_taxonomy(self, request: gcd_data_taxonomy.UpdateDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_data_taxonomy.UpdateDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_data_taxonomy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_update_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_taxonomy + + DEPRECATED. Please use the `post_update_data_taxonomy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. This `post_update_data_taxonomy` interceptor runs + before the `post_update_data_taxonomy_with_metadata` interceptor. + """ + return response + + def post_update_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_taxonomy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTaxonomyService server but before it is returned to user code. + + We recommend only using this `post_update_data_taxonomy_with_metadata` + interceptor in new development instead of the `post_update_data_taxonomy` interceptor. + When both interceptors are used, this `post_update_data_taxonomy_with_metadata` interceptor runs after the + `post_update_data_taxonomy` interceptor. The (possibly modified) response returned by + `post_update_data_taxonomy` will be passed to + `post_update_data_taxonomy_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTaxonomyService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataTaxonomyService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataTaxonomyServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataTaxonomyServiceRestInterceptor + + +class DataTaxonomyServiceRestTransport(_BaseDataTaxonomyServiceRestTransport): + """REST backend synchronous transport for DataTaxonomyService. + + DataTaxonomyService enables attribute-based governance. The + resources currently offered include DataTaxonomy and + DataAttribute. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DataTaxonomyServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DataTaxonomyServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.CreateDataAttribute") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: data_taxonomy.CreateDataAttributeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create data attribute method over HTTP. 
+
+            Args:
+                request (~.data_taxonomy.CreateDataAttributeRequest):
+                    The request object. Create DataAttribute request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+
+            """
+
+            http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_http_options()
+
+            request, metadata = self._interceptor.pre_create_data_attribute(request, metadata)
+            transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_transcoded_request(http_options, request)
+
+            body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataAttribute",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "CreateDataAttribute",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DataTaxonomyServiceRestTransport._CreateDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
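+            # For illustration (api_core behavior, not specific to this RPC):
+            # from_http_response maps the status code onto a typed exception,
+            # e.g. a hypothetical 404 would surface as core_exceptions.NotFound
+            # and a 403 as core_exceptions.PermissionDenied.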
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_attribute_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CreateDataAttribute", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.CreateDataAttributeBinding") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: data_taxonomy.CreateDataAttributeBindingRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create data attribute + binding method over HTTP. + + Args: + request (~.data_taxonomy.CreateDataAttributeBindingRequest): + The request object. Create DataAttributeBinding request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_http_options() + + request, metadata = self._interceptor.pre_create_data_attribute_binding(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataAttributeBinding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CreateDataAttributeBinding", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._CreateDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_attribute_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_attribute_binding_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CreateDataAttributeBinding", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.CreateDataTaxonomy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response + + def __call__(self, + request: gcd_data_taxonomy.CreateDataTaxonomyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create data taxonomy method over HTTP. + + Args: + request (~.gcd_data_taxonomy.CreateDataTaxonomyRequest): + The request object. Create DataTaxonomy request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_http_options() + + request, metadata = self._interceptor.pre_create_data_taxonomy(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataTaxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CreateDataTaxonomy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._CreateDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
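+            # This stub returns the raw operations_pb2.Operation; the GAPIC client
+            # layer normally wraps it in a google.api_core.operation.Operation
+            # future. A rough sketch of how a caller waits for the final
+            # DataTaxonomy (variable names are illustrative):
+            #
+            #     lro = client.create_data_taxonomy(request=request)
+            #     taxonomy = lro.result(timeout=300)  # polls via operations_client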
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_taxonomy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_taxonomy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CreateDataTaxonomy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.DeleteDataAttribute") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: data_taxonomy.DeleteDataAttributeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete data attribute method over HTTP. + + Args: + request (~.data_taxonomy.DeleteDataAttributeRequest): + The request object. Delete DataAttribute request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_attribute(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataAttribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataAttribute", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._DeleteDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_data_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_attribute_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataAttribute", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.DeleteDataAttributeBinding") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: data_taxonomy.DeleteDataAttributeBindingRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete data attribute + binding method over HTTP. + + Args: + request (~.data_taxonomy.DeleteDataAttributeBindingRequest): + The request object. Delete DataAttributeBinding request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_attribute_binding(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataAttributeBinding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataAttributeBinding", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._DeleteDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
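+            # Delete RPCs are long-running as well; a successful Operation carries
+            # an empty result. Status polling goes through the operations_client
+            # property defined above, whose http_options route GetOperation to
+            # GET /v1/{name=projects/*/locations/*/operations/*}. A hedged sketch
+            # (`transport` and the operation name are placeholders):
+            #
+            #     op = transport.operations_client.get_operation(
+            #         name="projects/p/locations/l/operations/some-op")
+            #     print(op.done)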
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_data_attribute_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_attribute_binding_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataAttributeBinding", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.DeleteDataTaxonomy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: data_taxonomy.DeleteDataTaxonomyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete data taxonomy method over HTTP. + + Args: + request (~.data_taxonomy.DeleteDataTaxonomyRequest): + The request object. Delete DataTaxonomy request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_taxonomy(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataTaxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataTaxonomy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._DeleteDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_data_taxonomy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_taxonomy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteDataTaxonomy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.GetDataAttribute") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: data_taxonomy.GetDataAttributeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
data_taxonomy.DataAttribute: + r"""Call the get data attribute method over HTTP. + + Args: + request (~.data_taxonomy.GetDataAttributeRequest): + The request object. Get DataAttribute request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_taxonomy.DataAttribute: + Denotes one dataAttribute in a dataTaxonomy, for + example, PII. DataAttribute resources can be defined in + a hierarchy. A single dataAttribute resource can contain + specs of multiple types + + :: + + PII + - ResourceAccessSpec : + - readers :foo@bar.com + - DataAccessSpec : + - readers :bar@foo.com + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_http_options() + + request, metadata = self._interceptor.pre_get_data_attribute(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataAttribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetDataAttribute", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._GetDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
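+            # Parsing detail for the success path below: data_taxonomy.DataAttribute
+            # is a proto-plus wrapper, so the JSON body is parsed into the
+            # underlying protobuf obtained via DataAttribute.pb(resp), and
+            # ignore_unknown_fields=True keeps an older client working when the
+            # server adds new fields. At a call site this surfaces as (names
+            # illustrative):
+            #
+            #     attribute = client.get_data_attribute(name=attribute_name)
+            #     print(attribute.display_name)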
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.DataAttribute()
+            pb_resp = data_taxonomy.DataAttribute.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_data_attribute(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_data_attribute_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    # Serialize the parsed proto-plus message; the raw HTTP
+                    # response object is not JSON-serializable here.
+                    response_payload = data_taxonomy.DataAttribute.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "GetDataAttribute",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _GetDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.GetDataAttributeBinding")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: data_taxonomy.GetDataAttributeBindingRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> data_taxonomy.DataAttributeBinding:
+            r"""Call the get data attribute
+            binding method over HTTP.
+
+            Args:
+                request (~.data_taxonomy.GetDataAttributeBindingRequest):
+                    The request object. Get DataAttributeBinding request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.data_taxonomy.DataAttributeBinding:
+                    DataAttributeBinding represents
+                binding of attributes to resources. Eg:
+                Bind 'CustomerInfo' entity with 'PII'
+                attribute.
+
+            """
+
+            http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_data_attribute_binding(request, metadata)
+            transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataAttributeBinding",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "GetDataAttributeBinding",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DataTaxonomyServiceRestTransport._GetDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.DataAttributeBinding()
+            pb_resp = data_taxonomy.DataAttributeBinding.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_data_attribute_binding(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_data_attribute_binding_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = data_taxonomy.DataAttributeBinding.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "GetDataAttributeBinding",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _GetDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.GetDataTaxonomy")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: data_taxonomy.GetDataTaxonomyRequest, *,
+                retry:
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> data_taxonomy.DataTaxonomy: + r"""Call the get data taxonomy method over HTTP. + + Args: + request (~.data_taxonomy.GetDataTaxonomyRequest): + The request object. Get DataTaxonomy request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_taxonomy.DataTaxonomy: + DataTaxonomy represents a set of + hierarchical DataAttributes resources, + grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have + attributes to manage PII data. It is + defined at project level. + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_http_options() + + request, metadata = self._interceptor.pre_get_data_taxonomy(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataTaxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetDataTaxonomy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._GetDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
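+            # The *_with_metadata interceptor hook used below receives the response
+            # headers flattened into (key, str(value)) tuples, so a custom
+            # interceptor can observe them without subclassing the transport.
+            # A minimal sketch (the logged header is just an example):
+            #
+            #     class AuditInterceptor(DataTaxonomyServiceRestInterceptor):
+            #         def post_get_data_taxonomy_with_metadata(self, response, metadata):
+            #             _LOGGER.info(dict(metadata).get("content-type"))
+            #             return response, metadata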
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.DataTaxonomy()
+            pb_resp = data_taxonomy.DataTaxonomy.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_data_taxonomy(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_data_taxonomy_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = data_taxonomy.DataTaxonomy.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "GetDataTaxonomy",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListDataAttributeBindings(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.ListDataAttributeBindings")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: data_taxonomy.ListDataAttributeBindingsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> data_taxonomy.ListDataAttributeBindingsResponse:
+            r"""Call the list data attribute
+            bindings method over HTTP.
+
+            Args:
+                request (~.data_taxonomy.ListDataAttributeBindingsRequest):
+                    The request object. List DataAttributeBindings request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.data_taxonomy.ListDataAttributeBindingsResponse:
+                    List DataAttributeBindings response.
+            """
+
+            http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_http_options()
+
+            request, metadata = self._interceptor.pre_list_data_attribute_bindings(request, metadata)
+            transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataAttributeBindings",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "ListDataAttributeBindings",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DataTaxonomyServiceRestTransport._ListDataAttributeBindings._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.ListDataAttributeBindingsResponse()
+            pb_resp = data_taxonomy.ListDataAttributeBindingsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_data_attribute_bindings(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_data_attribute_bindings_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = data_taxonomy.ListDataAttributeBindingsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "ListDataAttributeBindings",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListDataAttributes(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.ListDataAttributes")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
request: data_taxonomy.ListDataAttributesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> data_taxonomy.ListDataAttributesResponse: + r"""Call the list data attributes method over HTTP. + + Args: + request (~.data_taxonomy.ListDataAttributesRequest): + The request object. List DataAttributes request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_taxonomy.ListDataAttributesResponse: + List DataAttributes response. + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_http_options() + + request, metadata = self._interceptor.pre_list_data_attributes(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataAttributes", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "ListDataAttributes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._ListDataAttributes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
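+            # List responses follow AIP-158 pagination: callers pass
+            # next_page_token back as page_token until it comes back empty. The
+            # GAPIC client wraps this stub in a pager that does, roughly
+            # (assuming the response's repeated field is named data_attributes):
+            #
+            #     while True:
+            #         page = self(request)
+            #         items.extend(page.data_attributes)
+            #         if not page.next_page_token:
+            #             break
+            #         request.page_token = page.next_page_token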
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.ListDataAttributesResponse()
+            pb_resp = data_taxonomy.ListDataAttributesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_data_attributes(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_data_attributes_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = data_taxonomy.ListDataAttributesResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "ListDataAttributes",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListDataTaxonomies(_BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.ListDataTaxonomies")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: data_taxonomy.ListDataTaxonomiesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> data_taxonomy.ListDataTaxonomiesResponse:
+            r"""Call the list data taxonomies method over HTTP.
+
+            Args:
+                request (~.data_taxonomy.ListDataTaxonomiesRequest):
+                    The request object. List DataTaxonomies request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.data_taxonomy.ListDataTaxonomiesResponse:
+                    List DataTaxonomies response.
+            """
+
+            http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_http_options()
+
+            request, metadata = self._interceptor.pre_list_data_taxonomies(request, metadata)
+            transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataTaxonomies",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "ListDataTaxonomies",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DataTaxonomyServiceRestTransport._ListDataTaxonomies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = data_taxonomy.ListDataTaxonomiesResponse()
+            pb_resp = data_taxonomy.ListDataTaxonomiesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_data_taxonomies(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_data_taxonomies_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = data_taxonomy.ListDataTaxonomiesResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies",
+                    extra = {
+                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
+                        "rpcName": "ListDataTaxonomies",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _UpdateDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute, DataTaxonomyServiceRestStub):
+        def __hash__(self):
+            return hash("DataTaxonomyServiceRestTransport.UpdateDataAttribute")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: data_taxonomy.UpdateDataAttributeRequest, *,
+                retry:
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update data attribute method over HTTP. + + Args: + request (~.data_taxonomy.UpdateDataAttributeRequest): + The request object. Update DataAttribute request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_http_options() + + request, metadata = self._interceptor.pre_update_data_attribute(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataAttribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataAttribute", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._UpdateDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
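+            # Update RPCs follow the standard AIP-134 shape: only the fields named
+            # in request.update_mask are written. A hedged request-construction
+            # sketch (the masked field is illustrative):
+            #
+            #     from google.protobuf import field_mask_pb2
+            #     request = data_taxonomy.UpdateDataAttributeRequest(
+            #         data_attribute=attribute,
+            #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+            #     )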
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_attribute_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataAttribute", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.UpdateDataAttributeBinding") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: data_taxonomy.UpdateDataAttributeBindingRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update data attribute + binding method over HTTP. + + Args: + request (~.data_taxonomy.UpdateDataAttributeBindingRequest): + The request object. Update DataAttributeBinding request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_http_options() + + request, metadata = self._interceptor.pre_update_data_attribute_binding(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataAttributeBinding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataAttributeBinding", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._UpdateDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_attribute_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_attribute_binding_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataAttributeBinding", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.UpdateDataTaxonomy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response + + def __call__(self, + request: gcd_data_taxonomy.UpdateDataTaxonomyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update data taxonomy method over HTTP. + + Args: + request (~.gcd_data_taxonomy.UpdateDataTaxonomyRequest): + The request object. Update DataTaxonomy request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_http_options() + + request, metadata = self._interceptor.pre_update_data_taxonomy(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataTaxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataTaxonomy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._UpdateDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
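+            # (from_http_response selects the subclass from the status code and error + # payload, e.g. 404 -> core_exceptions.NotFound, 403 -> core_exceptions.PermissionDenied.)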
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_taxonomy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_taxonomy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "UpdateDataTaxonomy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_data_attribute(self) -> Callable[ + [data_taxonomy.CreateDataAttributeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_data_attribute_binding(self) -> Callable[ + [data_taxonomy.CreateDataAttributeBindingRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.CreateDataTaxonomyRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_attribute(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_attribute_binding(self) -> Callable[ + [data_taxonomy.DeleteDataAttributeBindingRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_taxonomy(self) -> Callable[ + [data_taxonomy.DeleteDataTaxonomyRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_attribute(self) -> Callable[ + [data_taxonomy.GetDataAttributeRequest], + data_taxonomy.DataAttribute]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_attribute_binding(self) -> Callable[ + [data_taxonomy.GetDataAttributeBindingRequest], + data_taxonomy.DataAttributeBinding]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_taxonomy(self) -> Callable[ + [data_taxonomy.GetDataTaxonomyRequest], + data_taxonomy.DataTaxonomy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_attribute_bindings(self) -> Callable[ + [data_taxonomy.ListDataAttributeBindingsRequest], + data_taxonomy.ListDataAttributeBindingsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataAttributeBindings(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_attributes(self) -> Callable[ + [data_taxonomy.ListDataAttributesRequest], + data_taxonomy.ListDataAttributesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataAttributes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_taxonomies(self) -> Callable[ + [data_taxonomy.ListDataTaxonomiesRequest], + data_taxonomy.ListDataTaxonomiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataTaxonomies(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_attribute(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataAttribute(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_attribute_binding(self) -> Callable[ + [data_taxonomy.UpdateDataAttributeBindingRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_taxonomy(self) -> Callable[ + [gcd_data_taxonomy.UpdateDataTaxonomyRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseDataTaxonomyServiceRestTransport._BaseGetLocation, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
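+            # (The status check runs before any JSON parsing, so an HTTP error + # surfaces as an exception rather than a half-parsed Location message.)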
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseDataTaxonomyServiceRestTransport._BaseListLocations, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseDataTaxonomyServiceRestTransport._BaseCancelOperation, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseDataTaxonomyServiceRestTransport._BaseGetOperation, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
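+            # (A 2xx response may still describe an unfinished operation; callers + # typically poll GetOperation until Operation.done is set before reading + # Operation.response or Operation.error.)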
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseDataTaxonomyServiceRestTransport._BaseListOperations, DataTaxonomyServiceRestStub): + def __hash__(self): + return hash("DataTaxonomyServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTaxonomyServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DataTaxonomyServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py new file mode 100644 index 000000000000..055bf23dd05a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py @@ -0,0 +1,883 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import data_taxonomy +from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDataTaxonomyServiceRestTransport(DataTaxonomyServiceTransport): + """Base REST backend transport for DataTaxonomyService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateDataAttribute: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataAttributeId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes', + 'body': 'data_attribute', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.CreateDataAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDataAttributeBinding: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataAttributeBindingId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/dataAttributeBindings', + 'body': 'data_attribute_binding', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.CreateDataAttributeBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDataTaxonomy: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataTaxonomyId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/dataTaxonomies', + 'body': 'data_taxonomy', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcd_data_taxonomy.CreateDataTaxonomyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDataAttribute: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.DeleteDataAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDataAttributeBinding: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "etag" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/dataAttributeBindings/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.DeleteDataAttributeBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDataTaxonomy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.DeleteDataTaxonomyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataAttribute: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.GetDataAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataAttributeBinding: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/dataAttributeBindings/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.GetDataAttributeBindingRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataTaxonomy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.GetDataTaxonomyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataAttributeBindings: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/dataAttributeBindings', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.ListDataAttributeBindingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataAttributes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + pb_request = data_taxonomy.ListDataAttributesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataTaxonomies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/dataTaxonomies', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.ListDataTaxonomiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataAttribute: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{data_attribute.name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', + 'body': 'data_attribute', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.UpdateDataAttributeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataAttributeBinding: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, 
message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{data_attribute_binding.name=projects/*/locations/*/dataAttributeBindings/*}', + 'body': 'data_attribute_binding', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_taxonomy.UpdateDataAttributeBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataTaxonomy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{data_taxonomy.name=projects/*/locations/*/dataTaxonomies/*}', + 'body': 'data_taxonomy', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcd_data_taxonomy.UpdateDataTaxonomyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ 
must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', 
+ }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseDataTaxonomyServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py new file mode 100644 index 000000000000..e865ac090fc7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataplexServiceClient +from .async_client import DataplexServiceAsyncClient + +__all__ = ( + 'DataplexServiceClient', + 'DataplexServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py new file mode 100644 index 000000000000..7abf8f9d4836 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -0,0 +1,4716 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
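Editor's note on the REST scaffolding closed out above: each `_Base*` stub pairs `_get_http_options()` (HTTP verb plus URI template), `_get_transcoded_request()` (the proto request mapped onto that template), and `_get_query_params_json()` (remaining fields, with defaults filled in for unset required fields, serialized as query parameters). A minimal sketch of how a concrete REST transport might compose them; `session`, `host`, and `request` are hypothetical stand-ins, not part of this patch:

    base = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies
    http_options = base._get_http_options()
    transcoded_request = base._get_transcoded_request(http_options, request)
    query_params = base._get_query_params_json(transcoded_request)
    # With the options above this would issue, roughly:
    # GET https://{host}/v1/projects/{p}/locations/{l}/dataTaxonomies?$alt=json;enum-encoding=int
    response = session.get(
        "https://{host}{uri}".format(host=host, uri=transcoded_request["uri"]),
        params=query_params,
    )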
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport +from .client import DataplexServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class DataplexServiceAsyncClient: + """Dataplex service provides data lakes as a service. The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. + """ + + _client: DataplexServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
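+ # Editor's sketch (assumed value; the actual template lives on the sync
+ # client and is not shown in this hunk): _DEFAULT_ENDPOINT_TEMPLATE is
+ # expected to look like "dataplex.{UNIVERSE_DOMAIN}", so that
+ # _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com")
+ # reproduces the classic "dataplex.googleapis.com" endpoint while custom
+ # universe domains substitute cleanly.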
+ DEFAULT_ENDPOINT = DataplexServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataplexServiceClient._DEFAULT_UNIVERSE + + action_path = staticmethod(DataplexServiceClient.action_path) + parse_action_path = staticmethod(DataplexServiceClient.parse_action_path) + asset_path = staticmethod(DataplexServiceClient.asset_path) + parse_asset_path = staticmethod(DataplexServiceClient.parse_asset_path) + environment_path = staticmethod(DataplexServiceClient.environment_path) + parse_environment_path = staticmethod(DataplexServiceClient.parse_environment_path) + job_path = staticmethod(DataplexServiceClient.job_path) + parse_job_path = staticmethod(DataplexServiceClient.parse_job_path) + lake_path = staticmethod(DataplexServiceClient.lake_path) + parse_lake_path = staticmethod(DataplexServiceClient.parse_lake_path) + session_path = staticmethod(DataplexServiceClient.session_path) + parse_session_path = staticmethod(DataplexServiceClient.parse_session_path) + task_path = staticmethod(DataplexServiceClient.task_path) + parse_task_path = staticmethod(DataplexServiceClient.parse_task_path) + zone_path = staticmethod(DataplexServiceClient.zone_path) + parse_zone_path = staticmethod(DataplexServiceClient.parse_zone_path) + common_billing_account_path = staticmethod(DataplexServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataplexServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataplexServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DataplexServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DataplexServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DataplexServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DataplexServiceClient.common_project_path) + parse_common_project_path = staticmethod(DataplexServiceClient.parse_common_project_path) + common_location_path = staticmethod(DataplexServiceClient.common_location_path) + parse_common_location_path = staticmethod(DataplexServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceAsyncClient: The constructed client. + """ + return DataplexServiceClient.from_service_account_info.__func__(DataplexServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceAsyncClient: The constructed client. 
+ """ + return DataplexServiceClient.from_service_account_file.__func__(DataplexServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataplexServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataplexServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataplexServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DataplexServiceClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataplex service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. 
If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence, and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation fails for any reason. + """ + self._client = DataplexServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.DataplexServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "credentialsType": None, + } + ) + + async def create_lake(self, + request: Optional[Union[service.CreateLakeRequest, dict]] = None, + *, + parent: Optional[str] = None, + lake: Optional[resources.Lake] = None, + lake_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateLakeRequest( + parent="parent_value", + lake_id="lake_id_value", + ) + + # Make the request + operation = client.create_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]]): + The request object. Create lake request. + parent (:class:`str`): + Required. The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake (:class:`google.cloud.dataplex_v1.types.Lake`): + Required. Lake resource + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake_id (:class:`str`): + Required. Lake identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + + This corresponds to the ``lake_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
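+ # For example (hypothetical values): create_lake(request=req, parent="p")
+ # raises the ValueError below, whereas create_lake(parent="p", lake=lake,
+ # lake_id="my-lake") builds the request from the flattened fields.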
+ flattened_params = [parent, lake, lake_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateLakeRequest): + request = service.CreateLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lake is not None: + request.lake = lake + if lake_id is not None: + request.lake_id = lake_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_lake(self, + request: Optional[Union[service.UpdateLakeRequest, dict]] = None, + *, + lake: Optional[resources.Lake] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]]): + The request object. Update lake request. + lake (:class:`google.cloud.dataplex_v1.types.Lake`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [lake, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateLakeRequest): + request = service.UpdateLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lake is not None: + request.lake = lake + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("lake.name", request.lake.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
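+ # Editor's note: the AsyncOperation above resolves to a resources.Lake on
+ # success; while the operation runs, its metadata messages decode as
+ # service.OperationMetadata (see metadata_type above).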
+ return response + + async def delete_lake(self, + request: Optional[Union[service.DeleteLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]]): + The request object. Delete lake request. + name (:class:`str`): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteLakeRequest): + request = service.DeleteLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_lakes(self, + request: Optional[Union[service.ListLakesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLakesAsyncPager: + r"""Lists lake resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]]): + The request object. List lakes request. + parent (:class:`str`): + Required. The resource name of the lake location, of the + form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager: + List lakes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakesRequest): + request = service.ListLakesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_lakes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLakesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_lake(self, + request: Optional[Union[service.GetLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Lake: + r"""Retrieves a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lake(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]]): + The request object. Get lake request. + name (:class:`str`): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Lake: + A lake is a centralized repository + for managing enterprise data across the + organization distributed across many + cloud projects, and stored in a variety + of storage services such as Google Cloud + Storage and BigQuery. The resources + attached to a lake are referred to as + managed resources. Data within these + managed resources can be structured or + unstructured. A lake provides data + admins with tools to organize, secure + and manage their data at scale, and + provides data scientists and data + engineers an integrated experience to + easily search, discover, analyze and + transform data and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetLakeRequest): + request = service.GetLakeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_lake_actions(self, + request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLakeActionsAsyncPager: + r"""Lists action resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]]): + The request object. List lake actions request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakeActionsRequest): + request = service.ListLakeActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_lake_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLakeActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
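+ # Editor's note: as the caller iterates past the first page, the pager
+ # re-invokes the wrapped RPC with the same retry, timeout, and metadata
+ # captured above.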
+ return response + + async def create_zone(self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a zone resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]]): + The request object. Create zone request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`google.cloud.dataplex_v1.types.Zone`): + Required. Zone resource. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (:class:`str`): + Required. Zone identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique across all lakes from all locations in + a project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. 
A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, zone, zone_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_zone(self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]]): + The request object. Update zone request. + zone (:class:`google.cloud.dataplex_v1.types.Zone`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [zone, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_zone] + + # Certain fields should be provided within the metadata header; + # add these here. 
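+ # Editor's note: a zone named, hypothetically,
+ # "projects/p/locations/l/lakes/k/zones/z" is sent as an
+ # x-goog-request-params routing header of the form
+ # "zone.name=<url-encoded resource name>".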
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("zone.name", request.zone.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_zone(self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]]): + The request object. Delete zone request. + name (:class:`str`): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_zones(self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListZonesAsyncPager: + r"""Lists zone resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]]): + The request object. List zones request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager: + List zones response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_zones] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListZonesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_zone(self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Zone: + r"""Retrieves a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]]): + The request object. Get zone request. + name (:class:`str`): + Required. 
The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Zone: + A zone represents a logical group of + related assets within a lake. A zone can + be used to map to organizational + structure or represent stages of data + readiness from raw to curated. It + provides managing behavior that is + shared or inherited by all contained + assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_zone_actions(self, + request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListZoneActionsAsyncPager: + r"""Lists action resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]]): + The request object. List zone actions request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZoneActionsRequest): + request = service.ListZoneActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_zone_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListZoneActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
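+ # The pager is lazy: no further RPCs are sent until the caller iterates
+ # with `async for`, at which point additional pages are fetched
+ # transparently using each response's `next_page_token`.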
+ return response + + async def create_asset(self, + request: Optional[Union[service.CreateAssetRequest, dict]] = None, + *, + parent: Optional[str] = None, + asset: Optional[resources.Asset] = None, + asset_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.CreateAssetRequest( + parent="parent_value", + asset_id="asset_id_value", + asset=asset, + ) + + # Make the request + operation = client.create_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]]): + The request object. Create asset request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset (:class:`google.cloud.dataplex_v1.types.Asset`): + Required. Asset resource. + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset_id (:class:`str`): + Required. Asset identifier. This ID will be used to + generate names such as table names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + + This corresponds to the ``asset_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. 
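+ # - Note: a plain dict is also accepted and coerced below, e.g. (values
+ #   hypothetical): {"parent": "projects/p/locations/l/lakes/lk/zones/z",
+ #   "asset_id": "my-asset", "asset": asset}.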
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, asset, asset_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateAssetRequest): + request = service.CreateAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if asset is not None: + request.asset = asset + if asset_id is not None: + request.asset_id = asset_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_asset(self, + request: Optional[Union[service.UpdateAssetRequest, dict]] = None, + *, + asset: Optional[resources.Asset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]]): + The request object. Update asset request. + asset (:class:`google.cloud.dataplex_v1.types.Asset`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. 
+ + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [asset, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateAssetRequest): + request = service.UpdateAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if asset is not None: + request.asset = asset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("asset.name", request.asset.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_asset(self, + request: Optional[Union[service.DeleteAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]]): + The request object. Delete asset request. + name (:class:`str`): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteAssetRequest): + request = service.DeleteAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
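+ # The raw response is a google.longrunning.Operation; it is wrapped below
+ # into an AsyncOperation whose eventual result is Empty, since nothing
+ # remains to return once the asset is deleted.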
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_assets(self, + request: Optional[Union[service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsAsyncPager: + r"""Lists asset resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]]): + The request object. List assets request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager: + List assets response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetsRequest): + request = service.ListAssetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
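+ # For example, a flattened call such as (hypothetical name)
+ #   await client.list_assets(parent="projects/p/locations/l/lakes/lk/zones/z")
+ # simply populates `request.parent` here.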
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAssetsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_asset(self, + request: Optional[Union[service.GetAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Asset: + r"""Retrieves an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_asset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]]): + The request object. Get asset request. + name (:class:`str`): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Asset: + An asset represents a cloud resource + that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetAssetRequest): + request = service.GetAssetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_asset_actions(self, + request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetActionsAsyncPager: + r"""Lists action resources in an asset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]]): + The request object. List asset actions request. + parent (:class:`str`): + Required. The resource name of the parent asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
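+ For example, ``metadata=[("x-goog-custom-bin", b"\x01\x02")]`` (a
+ hypothetical key) would be sent as a binary-valued header.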
+ + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetActionsRequest): + request = service.ListAssetActionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_asset_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAssetActionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_task(self, + request: Optional[Union[service.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[tasks.Task] = None, + task_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a task resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.CreateTaskRequest( + parent="parent_value", + task_id="task_id_value", + task=task, + ) + + # Make the request + operation = client.create_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]]): + The request object. Create task request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.dataplex_v1.types.Task`): + Required. Task resource. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (:class:`str`): + Required. Task identifier. + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, task, task_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateTaskRequest): + request = service.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + if task_id is not None: + request.task_id = task_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
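+ # The transport precomputes these wrappers with the method's default retry
+ # and timeout, so the per-call `retry` and `timeout` arguments override
+ # those defaults only for this invocation.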
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_task(self, + request: Optional[Union[service.UpdateTaskRequest, dict]] = None, + *, + task: Optional[tasks.Task] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]]): + The request object. Update task request. + task (:class:`google.cloud.dataplex_v1.types.Task`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [task, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateTaskRequest): + request = service.UpdateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if task is not None: + request.task = task + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("task.name", request.task.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_task(self, + request: Optional[Union[service.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]]): + The request object. Delete task request. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteTaskRequest): + request = service.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_tasks(self, + request: Optional[Union[service.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists tasks under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]]): + The request object. List tasks request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager: + List tasks response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTasksRequest): + request = service.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
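+ # The same `retry`, `timeout`, and `metadata` are handed to the pager so
+ # that subsequent page requests behave exactly like the first one.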
+ return response + + async def get_task(self, + request: Optional[Union[service.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tasks.Task: + r"""Get task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]]): + The request object. Get task request. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Task: + A task represents a user-visible job. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTaskRequest): + request = service.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
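As the guard at the top of each method enforces, the `request` argument and the flattened fields are mutually exclusive. A sketch of the three equivalent call forms (typed request, plain dict, flattened keyword), assuming a client and a placeholder task name:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def get_task_three_ways():
        client = dataplex_v1.DataplexServiceAsyncClient()
        name = "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task"

        await client.get_task(request=dataplex_v1.GetTaskRequest(name=name))
        await client.get_task(request={"name": name})  # coerced to GetTaskRequest
        await client.get_task(name=name)

        # Mixing a request object with a flattened field raises ValueError.
        try:
            await client.get_task(request={"name": name}, name=name)
        except ValueError:
            pass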
+ return response + + async def list_jobs(self, + request: Optional[Union[service.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs under the given task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]]): + The request object. List jobs request. + parent (:class:`str`): + Required. The resource name of the parent task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager: + List jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListJobsRequest): + request = service.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_task(self, + request: Optional[Union[service.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.RunTaskResponse: + r"""Run an on demand execution of a Task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]]): + The request object. + name (:class:`str`): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.RunTaskResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RunTaskRequest): + request = service.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
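The metadata tuples assembled before each call, as shown above, become the x-goog-request-params routing header that the backend uses to route requests. A small illustration of what gapic_v1.routing_header.to_grpc_metadata produces (the resource name is a placeholder):

.. code-block:: python

    from google.api_core.gapic_v1 import routing_header

    header = routing_header.to_grpc_metadata(
        (("parent", "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task"),)
    )
    # `header` is a single key/value pair such as:
    # ("x-goog-request-params", "parent=projects%2Fmy-project%2Flocations%2F...")
    print(header)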
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: Optional[Union[service.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tasks.Job: + r"""Get job resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]]): + The request object. Get job request. + name (:class:`str`): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Job: + A job represents an instance of a + task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
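Every method's `metadata` parameter follows the string/bytes rule documented above: values must be `str` unless the key ends in `-bin`, in which case gRPC requires `bytes`. A hedged sketch with hypothetical header names:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def run_task_with_metadata():
        client = dataplex_v1.DataplexServiceAsyncClient()
        await client.run_task(
            name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",
            metadata=(
                ("x-example-trace", "abc123"),           # normal key: str value
                ("x-example-payload-bin", b"\x00\x01"),  # `-bin` key: bytes value
            ),
        )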
+ if not isinstance(request, service.GetJobRequest): + request = service.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_job(self, + request: Optional[Union[service.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Cancel jobs running for the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]]): + The request object. Cancel task jobs. + name (:class:`str`): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one.
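The `retry` parameter accepts a google.api_core.retry_async.AsyncRetry that overrides the method's default policy for a single call. A sketch that retries cancel_job on transient UNAVAILABLE errors, with illustrative (not recommended-default) backoff numbers:

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core.retry import if_exception_type
    from google.api_core.retry_async import AsyncRetry
    from google.cloud import dataplex_v1

    async def cancel_job_with_retry():
        client = dataplex_v1.DataplexServiceAsyncClient()
        await client.cancel_job(
            name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task/jobs/my-job",
            retry=AsyncRetry(
                predicate=if_exception_type(exceptions.ServiceUnavailable),
                initial=1.0,      # first delay, in seconds
                maximum=10.0,     # cap on any single delay
                multiplier=2.0,   # exponential backoff factor
                timeout=60.0,     # give up retrying after a minute
            ),
            timeout=30.0,  # per-attempt RPC timeout
        )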
+ if not isinstance(request, service.CancelJobRequest): + request = service.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_environment(self, + request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + environment: Optional[analyze.Environment] = None, + environment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]]): + The request object. Create environment request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (:class:`str`): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
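The environment_id rules listed above translate to a simple pattern that can be checked client-side before issuing the request; the server remains authoritative. A hedged sketch (the regex is an illustration, not an official validator):

.. code-block:: python

    import re

    # Starts with a lowercase letter; then up to 61 chars of [a-z0-9-];
    # must end with a letter or digit; total length 1-63.
    ENV_ID_PATTERN = re.compile(r"^[a-z](?:[a-z0-9-]{0,61}[a-z0-9])?$")

    assert ENV_ID_PATTERN.match("dev-env-1")
    assert not ENV_ID_PATTERN.match("1-starts-with-a-digit")
    assert not ENV_ID_PATTERN.match("ends-with-hyphen-")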
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, environment, environment_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_environment(self, + request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, + *, + environment: Optional[analyze.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]]): + The request object. Update environment request. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [environment, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_environment] + + # Certain fields should be provided within the metadata header; + # add these here. 
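update_environment applies only the paths named in update_mask, so a sparse Environment message is sufficient. A minimal sketch updating just the description, assuming placeholder names:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    async def update_environment_description():
        client = dataplex_v1.DataplexServiceAsyncClient()
        environment = dataplex_v1.Environment(
            name="projects/my-project/locations/us-central1/lakes/my-lake/environments/my-env",
            description="refreshed description",
        )
        operation = await client.update_environment(
            environment=environment,
            # Only `description` is written; all other fields are left untouched.
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )
        await operation.result()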
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("environment.name", request.environment.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_environment(self, + request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]]): + The request object. Delete environment request. + name (:class:`str`): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_environments(self, + request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEnvironmentsAsyncPager: + r"""Lists environments under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]]): + The request object. List environments request. + parent (:class:`str`): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: + List environments response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnvironmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_environment(self, + request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]]): + The request object. Get environment request. + name (:class:`str`): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sessions(self, + request: Optional[Union[service.ListSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists session resources in an environment. + + .. code-block:: python
+ + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]]): + The request object. List sessions request. + parent (:class:`str`): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager: + List sessions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSessionsRequest): + request = service.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
+ response = pagers.ListSessionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
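The operations mixins above take raw google.longrunning protos rather than proto-plus types, which is why dicts are coerced via keyword expansion. A sketch of listing operations for a location (the name is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def show_operations():
        client = dataplex_v1.DataplexServiceAsyncClient()
        # The dict is expanded into operations_pb2.ListOperationsRequest(**request).
        response = await client.list_operations(
            request={"name": "projects/my-project/locations/us-central1"}
        )
        for op in response.operations:
            print(op.name, op.done)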
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataplexServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DataplexServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py new file mode 100644 index 000000000000..ce202c7c2fef --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -0,0 +1,5118 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
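Before moving on to the synchronous client below: the `__aenter__`/`__aexit__` pair defined above makes the async client usable as an async context manager, which guarantees the transport is closed. A short usage sketch (project and location are placeholders):

.. code-block:: python

    import asyncio
    from google.cloud import dataplex_v1

    async def main():
        # __aexit__ awaits transport.close(), shutting down the gRPC channel.
        async with dataplex_v1.DataplexServiceAsyncClient() as client:
            pager = await client.list_lakes(
                parent="projects/my-project/locations/us-central1"
            )
            async for lake in pager:
                print(lake.name)

    asyncio.run(main())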
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataplexServiceGrpcTransport +from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport +from .transports.rest import DataplexServiceRestTransport + + +class DataplexServiceClientMeta(type): + """Metaclass for the DataplexService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] + _transport_registry["grpc"] = DataplexServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataplexServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataplexServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataplexServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+ return next(iter(cls._transport_registry.values())) + + +class DataplexServiceClient(metaclass=DataplexServiceClientMeta): + """Dataplex service provides data lakes as a service. The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataplexServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataplexServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataplexServiceTransport: The transport used by the client + instance.
+ """ + return self._transport + + @staticmethod + def action_path(project: str,location: str,lake: str,action: str,) -> str: + """Returns a fully-qualified action string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, ) + + @staticmethod + def parse_action_path(path: str) -> Dict[str,str]: + """Parses a action path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/actions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def asset_path(project: str,location: str,lake: str,zone: str,asset: str,) -> str: + """Returns a fully-qualified asset string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) + + @staticmethod + def parse_asset_path(path: str) -> Dict[str,str]: + """Parses a asset path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/assets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def environment_path(project: str,location: str,lake: str,environment: str,) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str,str]: + """Parses a environment path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_path(project: str,location: str,lake: str,task: str,job: str,) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str,str]: + """Parses a job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/tasks/(?P.+?)/jobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def lake_path(project: str,location: str,lake: str,) -> str: + """Returns a fully-qualified lake string.""" + return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + + @staticmethod + def parse_lake_path(path: str) -> Dict[str,str]: + """Parses a lake path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def session_path(project: str,location: str,lake: str,environment: str,session: str,) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str,str]: + """Parses a session path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)/sessions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str,location: str,lake: 
str,task: str,) -> str: + """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str,str]: + """Parses a task path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/tasks/(?P<task>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def zone_path(project: str,location: str,lake: str,zone: str,) -> str: + """Returns a fully-qualified zone string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + + @staticmethod + def parse_zone_path(path: str) -> Dict[str,str]: + """Parses a zone path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = DataplexServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataplex service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataplexServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataplexServiceClient._read_environment_variables() + self._client_cert_source = DataplexServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DataplexServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataplexServiceTransport) + if transport_provided: + # transport is a DataplexServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly."
+ ) + self._transport = cast(DataplexServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DataplexServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport]] = ( + DataplexServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataplexServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.DataplexServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "credentialsType": None, + } + ) + + def create_lake(self, + request: Optional[Union[service.CreateLakeRequest, dict]] = None, + *, + parent: Optional[str] = None, + lake: Optional[resources.Lake] = None, + lake_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateLakeRequest( + parent="parent_value", + lake_id="lake_id_value", + ) + + # Make the request + operation = client.create_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]): + The request object. Create lake request. + parent (str): + Required. 
The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Lake resource + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lake_id (str): + Required. Lake identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + + This corresponds to the ``lake_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, lake, lake_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateLakeRequest): + request = service.CreateLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lake is not None: + request.lake = lake + if lake_id is not None: + request.lake_id = lake_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_lake] + + # Certain fields should be provided within the metadata header; + # add these here. 
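+        # (Illustrative note: to_grpc_metadata yields the standard
+        # ("x-goog-request-params", ...) metadata pair, so a parent such as
+        # "projects/my-project/locations/us-central1" travels with the request
+        # and is used by the service for routing.)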
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_lake(self, + request: Optional[Union[service.UpdateLakeRequest, dict]] = None, + *, + lake: Optional[resources.Lake] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]): + The request object. Update lake request. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``lake`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the + organization distributed across many cloud projects, + and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources + attached to a lake are referred to as managed + resources. Data within these managed resources can be + structured or unstructured. 
A lake provides data + admins with tools to organize, secure and manage + their data at scale, and provides data scientists and + data engineers an integrated experience to easily + search, discover, analyze and transform data and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [lake, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateLakeRequest): + request = service.UpdateLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lake is not None: + request.lake = lake + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("lake.name", request.lake.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Lake, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_lake(self, + request: Optional[Union[service.DeleteLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]): + The request object. Delete lake request. + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteLakeRequest): + request = service.DeleteLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_lakes(self, + request: Optional[Union[service.ListLakesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLakesPager: + r"""Lists lake resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]): + The request object. List lakes request. + parent (str): + Required. The resource name of the lake location, of the + form: + ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager: + List lakes response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListLakesRequest): + request = service.ListLakesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_lakes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLakesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_lake(self, + request: Optional[Union[service.GetLakeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Lake: + r"""Retrieves a lake resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = client.get_lake(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]): + The request object. Get lake request. + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Lake: + A lake is a centralized repository + for managing enterprise data across the + organization distributed across many + cloud projects, and stored in a variety + of storage services such as Google Cloud + Storage and BigQuery. The resources + attached to a lake are referred to as + managed resources. Data within these + managed resources can be structured or + unstructured. A lake provides data + admins with tools to organize, secure + and manage their data at scale, and + provides data scientists and data + engineers an integrated experience to + easily search, discover, analyze and + transform data and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetLakeRequest): + request = service.GetLakeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_lake] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_lake_actions(self, + request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLakeActionsPager: + r"""Lists action resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]): + The request object. List lake actions request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.ListLakeActionsRequest): + request = service.ListLakeActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_lake_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLakeActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_zone(self, + request: Optional[Union[service.CreateZoneRequest, dict]] = None, + *, + parent: Optional[str] = None, + zone: Optional[resources.Zone] = None, + zone_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a zone resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]): + The request object. Create zone request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Zone resource. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_id (str): + Required. Zone identifier. This ID will be used to + generate names such as database and dataset names when + publishing metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. 
+ - Must be unique across all lakes from all locations in + a project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + + This corresponds to the ``zone_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, zone, zone_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateZoneRequest): + request = service.CreateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if zone is not None: + request.zone = zone + if zone_id is not None: + request.zone_id = zone_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_zone(self, + request: Optional[Union[service.UpdateZoneRequest, dict]] = None, + *, + zone: Optional[resources.Zone] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]): + The request object. Update zone request. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can + be used to map to organizational structure or + represent stages of data readiness from raw to + curated. It provides managing behavior that is shared + or inherited by all contained assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [zone, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateZoneRequest): + request = service.UpdateZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if zone is not None: + request.zone = zone + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_zone] + + # Certain fields should be provided within the metadata header; + # add these here. 
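+        # (Hedged aside: to_grpc_metadata serializes the ("zone.name", ...)
+        # pair into the x-goog-request-params header, so the backend can route
+        # the call by the resource being updated.)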
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("zone.name", request.zone.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Zone, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_zone(self, + request: Optional[Union[service.DeleteZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]): + The request object. Delete zone request. + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
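+        # For example, client.delete_zone(request=req, name="...") would raise
+        # the ValueError below: pass either a request object or the flattened
+        # `name`, never both. (Illustrative comment; values are placeholders.)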
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteZoneRequest): + request = service.DeleteZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_zones(self, + request: Optional[Union[service.ListZonesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListZonesPager: + r"""Lists zone resources in a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]): + The request object. List zones request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager: + List zones response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZonesRequest): + request = service.ListZonesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_zones] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListZonesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_zone(self, + request: Optional[Union[service.GetZoneRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Zone: + r"""Retrieves a zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]): + The request object. Get zone request. + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Zone: + A zone represents a logical group of + related assets within a lake. A zone can + be used to map to organizational + structure or represent stages of data + readiness from raw to curated. It + provides managing behavior that is + shared or inherited by all contained + assets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetZoneRequest): + request = service.GetZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_zone] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_zone_actions(self, + request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListZoneActionsPager: + r"""Lists action resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]): + The request object. List zone actions request. + parent (str): + Required. 
The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListZoneActionsRequest): + request = service.ListZoneActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_zone_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListZoneActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_asset(self, + request: Optional[Union[service.CreateAssetRequest, dict]] = None, + *, + parent: Optional[str] = None, + asset: Optional[resources.Asset] = None, + asset_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.CreateAssetRequest( + parent="parent_value", + asset_id="asset_id_value", + asset=asset, + ) + + # Make the request + operation = client.create_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]): + The request object. Create asset request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Asset resource. + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset_id (str): + Required. Asset identifier. This ID will be used to + generate names such as table names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + + This corresponds to the ``asset_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, asset, asset_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateAssetRequest): + request = service.CreateAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
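+        # Sketch of the flattened form (placeholder names, mirroring the
+        # docstring sample above; not generated code):
+        #   asset = dataplex_v1.Asset()
+        #   asset.resource_spec.type_ = "BIGQUERY_DATASET"
+        #   client.create_asset(parent=".../zones/my-zone", asset=asset, asset_id="my-asset")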
+ if parent is not None: + request.parent = parent + if asset is not None: + request.asset = asset + if asset_id is not None: + request.asset_id = asset_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_asset(self, + request: Optional[Union[service.UpdateAssetRequest, dict]] = None, + *, + asset: Optional[resources.Asset] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]): + The request object. Update asset request. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``asset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [asset, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateAssetRequest): + request = service.UpdateAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if asset is not None: + request.asset = asset + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("asset.name", request.asset.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Asset, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_asset(self, + request: Optional[Union[service.DeleteAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]): + The request object. Delete asset request. + name (str): + Required. 
The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteAssetRequest): + request = service.DeleteAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_assets(self, + request: Optional[Union[service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsPager: + r"""Lists asset resources in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]): + The request object. List assets request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager: + List assets response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetsRequest): + request = service.ListAssetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAssetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_asset(self, + request: Optional[Union[service.GetAssetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Asset: + r"""Retrieves an asset resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = client.get_asset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]): + The request object. Get asset request. + name (str): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Asset: + An asset represents a cloud resource + that is being managed within a lake as a + member of a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetAssetRequest): + request = service.GetAssetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_asset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
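+        # (Hedged note: retry and timeout default to the gapic_v1.method.DEFAULT
+        # sentinel, which tells the wrapped method to apply its preconfigured
+        # per-RPC settings; explicit caller-supplied values override them.)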
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_asset_actions(self, + request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetActionsPager: + r"""Lists action resources in an asset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]): + The request object. List asset actions request. + parent (str): + Required. The resource name of the parent asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager: + List actions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListAssetActionsRequest): + request = service.ListAssetActionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
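+        # (Hedged aside: `_wrapped_methods` maps each transport stub to a
+        # callable preconfigured with the service's default retry policy and
+        # timeout, so every call site here shares the same behavior.)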
+ rpc = self._transport._wrapped_methods[self._transport.list_asset_actions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAssetActionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task(self, + request: Optional[Union[service.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[tasks.Task] = None, + task_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a task resource within a lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.CreateTaskRequest( + parent="parent_value", + task_id="task_id_value", + task=task, + ) + + # Make the request + operation = client.create_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]): + The request object. Create task request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.dataplex_v1.types.Task): + Required. Task resource. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (str): + Required. Task identifier. + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.dataplex_v1.types.Task` A task
+                represents a user-visible job.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, task, task_id]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, service.CreateTaskRequest):
+            request = service.CreateTaskRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if task is not None:
+            request.task = task
+        if task_id is not None:
+            request.task_id = task_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_task]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            tasks.Task,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_task(self,
+            request: Optional[Union[service.UpdateTaskRequest, dict]] = None,
+            *,
+            task: Optional[tasks.Task] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Updates the task resource.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]): + The request object. Update task request. + task (google.cloud.dataplex_v1.types.Task): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.Task` A task + represents a user-visible job. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [task, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateTaskRequest): + request = service.UpdateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if task is not None: + request.task = task + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("task.name", request.task.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
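+        # (The raw response below is a long-running operation; it is wrapped
+        # into an operation future so callers can block on .result(), as the
+        # docstring sample above does, or poll it asynchronously.)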
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + tasks.Task, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_task(self, + request: Optional[Union[service.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]): + The request object. Delete task request. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one.
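+ # Illustrative aside (not generated code; the task path is hypothetical): + # because ``request`` also accepts a dict, these two calls are equivalent: + # client.delete_task(name="projects/p/locations/l/lakes/k/tasks/t") + # client.delete_task(request={"name": "projects/p/locations/l/lakes/k/tasks/t"})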
+ if not isinstance(request, service.DeleteTaskRequest): + request = service.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_tasks(self, + request: Optional[Union[service.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTasksPager: + r"""Lists tasks under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]): + The request object. List tasks request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager: + List tasks response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
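+ # For example (illustrative), supplying both styles at once raises ValueError: + # client.list_tasks(request=dataplex_v1.ListTasksRequest(parent=parent), parent=parent)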
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTasksRequest): + request = service.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task(self, + request: Optional[Union[service.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tasks.Task: + r"""Get task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]): + The request object. Get task request. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`.
+ + Returns: + google.cloud.dataplex_v1.types.Task: + A task represents a user-visible job. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTaskRequest): + request = service.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_jobs(self, + request: Optional[Union[service.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListJobsPager: + r"""Lists Jobs under the given task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]): + The request object. List jobs request. + parent (str): + Required. The resource name of the parent task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`.
+ + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager: + List jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListJobsRequest): + request = service.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_task(self, + request: Optional[Union[service.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.RunTaskResponse: + r"""Run an on-demand execution of a Task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]): + The request object. + name (str): + Required. The resource name of the task: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.RunTaskResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.RunTaskRequest): + request = service.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: Optional[Union[service.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tasks.Job: + r"""Get job resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]): + The request object. Get job request. + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Job: + A job represents an instance of a + task. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetJobRequest): + request = service.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_job(self, + request: Optional[Union[service.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Cancel jobs running for the task resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]): + The request object. Cancel task jobs. + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CancelJobRequest): + request = service.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_environment(self, + request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + environment: Optional[analyze.Environment] = None, + environment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create an environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): + The request object. Create environment request. + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (str): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, environment, environment_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_environment(self, + request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, + *, + environment: Optional[analyze.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update the environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): + The request object. Update environment request. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [environment, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
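+ # Illustrative sketch with hypothetical values: the update_mask limits the + # write to the listed fields, e.g. to change only the description: + # from google.protobuf import field_mask_pb2 + # mask = field_mask_pb2.FieldMask(paths=["description"]) + # client.update_environment(environment=env, update_mask=mask)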
+ if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("environment.name", request.environment.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_environment(self, + request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): + The request object. Delete environment request. + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_environments(self, + request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEnvironmentsPager: + r"""Lists environments under the given lake. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): + The request object. List environments request. + parent (str): + Required. 
The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: + List environments response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEnvironmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_environment(self, + request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): + The request object. Get environment request. + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sessions(self, + request: Optional[Union[service.ListSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists session resources in an environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]): + The request object. List sessions request. + parent (str): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager: + List sessions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListSessionsRequest): + request = service.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + def __enter__(self) -> "DataplexServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
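+ # Illustrative, with a hypothetical operation name: a plain dict such as + # {"name": "projects/p/locations/l/operations/123"} is accepted here and + # expanded into the pb2 request type below.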
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. 
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            None
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.CancelOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_location]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request, retry=retry, timeout=timeout, metadata=metadata,)
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
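+
+        Example (illustrative only; the project ID is a placeholder and
+        ``client`` is an initialized ``DataplexServiceClient``)::
+
+            response = client.list_locations(request={"name": "projects/my-project"})
+            for location in response.locations:
+                print(location.location_id)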
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.ListLocationsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_locations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request, retry=retry, timeout=timeout, metadata=metadata,)
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
+__all__ = (
+    "DataplexServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py
new file mode 100644
index 000000000000..718f88da504d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py
@@ -0,0 +1,1420 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks + + +class ListLakesPager: + """A pager for iterating through ``list_lakes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``lakes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLakes`` requests and continue to iterate + through the ``lakes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListLakesResponse], + request: service.ListLakesRequest, + response: service.ListLakesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListLakesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListLakesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListLakesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Lake]: + for page in self.pages: + yield from page.lakes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakesAsyncPager: + """A pager for iterating through ``list_lakes`` requests. 
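+
+    Example (illustrative only; run inside a coroutine with an initialized
+    ``DataplexServiceAsyncClient`` bound as ``client``; names are placeholders)::
+
+        pager = await client.list_lakes(
+            parent="projects/my-project/locations/us-central1")
+        async for lake in pager:
+            print(lake.name)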
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``lakes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLakes`` requests and continue to iterate + through the ``lakes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListLakesResponse]], + request: service.ListLakesRequest, + response: service.ListLakesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListLakesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListLakesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListLakesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Lake]: + async def async_generator(): + async for page in self.pages: + for response in page.lakes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakeActionsPager: + """A pager for iterating through ``list_lake_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLakeActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
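+
+    Example (illustrative only; assumes an initialized ``DataplexServiceClient``
+    bound as ``client``; the lake name is a placeholder)::
+
+        pager = client.list_lake_actions(
+            parent="projects/my-project/locations/us-central1/lakes/my-lake")
+        for action in pager:
+            print(action.category)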
+ """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListLakeActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListLakeActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLakeActionsAsyncPager: + """A pager for iterating through ``list_lake_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLakeActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListLakeActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListLakeActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Action]: + async def async_generator(): + async for page in self.pages: + for response in page.actions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZonesPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListZonesResponse], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZonesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListZonesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
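+
+        Example (illustrative only): iterating ``pages`` instead of the pager
+        itself yields whole ``ListZonesResponse`` objects; the lake name below
+        is a placeholder::
+
+            pager = client.list_zones(
+                parent="projects/my-project/locations/us-central1/lakes/my-lake")
+            for page in pager.pages:
+                for zone in page.zones:
+                    print(zone.name)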
+ """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Zone]: + for page in self.pages: + yield from page.zones + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZonesAsyncPager: + """A pager for iterating through ``list_zones`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``zones`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListZones`` requests and continue to iterate + through the ``zones`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListZonesResponse]], + request: service.ListZonesRequest, + response: service.ListZonesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZonesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListZonesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListZonesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListZonesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Zone]: + async def async_generator(): + async for page in self.pages: + for response in page.zones: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZoneActionsPager: + """A pager for iterating through ``list_zone_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListZoneActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListZoneActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListZoneActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListZoneActionsAsyncPager: + """A pager for iterating through ``list_zone_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListZoneActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListZoneActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListZoneActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Action]: + async def async_generator(): + async for page in self.pages: + for response in page.actions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetsPager: + """A pager for iterating through ``list_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssets`` requests and continue to iterate + through the ``assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListAssetsResponse], + request: service.ListAssetsRequest, + response: service.ListAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAssetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListAssetsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Asset]: + for page in self.pages: + yield from page.assets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetsAsyncPager: + """A pager for iterating through ``list_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``assets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAssets`` requests and continue to iterate + through the ``assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListAssetsResponse]], + request: service.ListAssetsRequest, + response: service.ListAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListAssetsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListAssetsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Asset]: + async def async_generator(): + async for page in self.pages: + for response in page.assets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetActionsPager: + """A pager for iterating through ``list_asset_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssetActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListActionsResponse], + request: service.ListAssetActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListAssetActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Action]: + for page in self.pages: + yield from page.actions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetActionsAsyncPager: + """A pager for iterating through ``list_asset_actions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``actions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAssetActions`` requests and continue to iterate + through the ``actions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListActionsResponse]], + request: service.ListAssetActionsRequest, + response: service.ListActionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListActionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListAssetActionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListActionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Action]: + async def async_generator(): + async for page in self.pages: + for response in page.actions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListTasksResponse], + request: service.ListTasksRequest, + response: service.ListTasksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListTasksRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListTasksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListTasksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tasks.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListTasksResponse]], + request: service.ListTasksRequest, + response: service.ListTasksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListTasksRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListTasksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListTasksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[tasks.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListJobsResponse], + request: service.ListJobsRequest, + response: service.ListJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tasks.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListJobsResponse]], + request: service.ListJobsRequest, + response: service.ListJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[tasks.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEnvironmentsPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
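+
+    Example (illustrative only): iteration is lazy, so breaking out early
+    avoids fetching any remaining pages; the lake name is a placeholder::
+
+        for environment in client.list_environments(
+                parent="projects/my-project/locations/us-central1/lakes/my-lake"):
+            if environment.name.endswith("/default"):
+                break  # no further ListEnvironments requests are sent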
+ """ + def __init__(self, + method: Callable[..., service.ListEnvironmentsResponse], + request: service.ListEnvironmentsRequest, + response: service.ListEnvironmentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListEnvironmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Environment]: + for page in self.pages: + yield from page.environments + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEnvironmentsAsyncPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListEnvironmentsResponse]], + request: service.ListEnvironmentsRequest, + response: service.ListEnvironmentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListEnvironmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[analyze.Environment]: + async def async_generator(): + async for page in self.pages: + for response in page.environments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., service.ListSessionsResponse], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
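+
+        Example (an illustrative sketch of how the generated client wires this
+        pager together; user code normally obtains it from
+        ``DataplexServiceClient.list_sessions`` instead of calling ``__init__``)::
+
+            # Inside DataplexServiceClient.list_sessions, after the first RPC:
+            return ListSessionsPager(
+                method=rpc,          # the retry/timeout-wrapped transport method
+                request=request,
+                response=response,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )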
+ """ + self._method = method + self._request = service.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[service.ListSessionsResponse]], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = service.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[analyze.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst new file mode 100644 index 000000000000..a70e22115784 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DataplexServiceTransport` is the ABC for all transports. +- public child `DataplexServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DataplexServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDataplexServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DataplexServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py new file mode 100644 index 000000000000..e68c264bc640 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataplexServiceTransport +from .grpc import DataplexServiceGrpcTransport +from .grpc_asyncio import DataplexServiceGrpcAsyncIOTransport +from .rest import DataplexServiceRestTransport +from .rest import DataplexServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] +_transport_registry['grpc'] = DataplexServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DataplexServiceGrpcAsyncIOTransport +_transport_registry['rest'] = DataplexServiceRestTransport + +__all__ = ( + 'DataplexServiceTransport', + 'DataplexServiceGrpcTransport', + 'DataplexServiceGrpcAsyncIOTransport', + 'DataplexServiceRestTransport', + 'DataplexServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py new file mode 100644 index 000000000000..3a63297e464e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py @@ -0,0 +1,838 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataplexServiceTransport(abc.ABC): + """Abstract transport class for DataplexService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
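+
+        A minimal construction sketch (illustrative only; this class is
+        abstract, so in practice a concrete subclass such as
+        ``DataplexServiceGrpcTransport`` is instantiated, usually by the
+        client on the caller's behalf)::
+
+            transport = DataplexServiceGrpcTransport(
+                host='dataplex.googleapis.com',
+                credentials=None,  # resolved via google.auth.default()
+            )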
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
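+        # Each RPC below is wrapped with its default timeout (60s for the
+        # mutating calls, None where no default applies); the idempotent
+        # list/get RPCs also get a default retry policy: exponential backoff
+        # starting at 1.0s, growing 1.3x up to a 10.0s cap, retrying only on
+        # ServiceUnavailable, under an overall 60.0s deadline.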
+ self._wrapped_methods = { + self.create_lake: gapic_v1.method.wrap_method( + self.create_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.update_lake: gapic_v1.method.wrap_method( + self.update_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_lake: gapic_v1.method.wrap_method( + self.delete_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.list_lakes: gapic_v1.method.wrap_method( + self.list_lakes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_lake: gapic_v1.method.wrap_method( + self.get_lake, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_lake_actions: gapic_v1.method.wrap_method( + self.list_lake_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: gapic_v1.method.wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: gapic_v1.method.wrap_method( + self.update_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: gapic_v1.method.wrap_method( + self.delete_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: gapic_v1.method.wrap_method( + self.list_zones, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: gapic_v1.method.wrap_method( + self.get_zone, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zone_actions: gapic_v1.method.wrap_method( + self.list_zone_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_asset: gapic_v1.method.wrap_method( + self.create_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_asset: gapic_v1.method.wrap_method( + self.update_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_asset: gapic_v1.method.wrap_method( + self.delete_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: gapic_v1.method.wrap_method( + self.list_assets, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_asset: gapic_v1.method.wrap_method( + self.get_asset, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + 
), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_asset_actions: gapic_v1.method.wrap_method( + self.list_asset_actions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=60.0, + client_info=client_info, + ), + self.update_task: gapic_v1.method.wrap_method( + self.update_task, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=None, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_environment: gapic_v1.method.wrap_method( + self.create_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.update_environment: gapic_v1.method.wrap_method( + self.update_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_environment: gapic_v1.method.wrap_method( + self.delete_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: gapic_v1.method.wrap_method( + self.list_environments, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_environment: gapic_v1.method.wrap_method( + self.get_environment, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + 
self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + Union[ + service.ListLakesResponse, + Awaitable[service.ListLakesResponse] + ]]: + raise NotImplementedError() + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + Union[ + resources.Lake, + Awaitable[resources.Lake] + ]]: + raise NotImplementedError() + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + Union[ + service.ListZonesResponse, + Awaitable[service.ListZonesResponse] + ]]: + raise NotImplementedError() + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + Union[ + resources.Zone, + Awaitable[resources.Zone] + ]]: + raise NotImplementedError() + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + 
@property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + Union[ + service.ListAssetsResponse, + Awaitable[service.ListAssetsResponse] + ]]: + raise NotImplementedError() + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + Union[ + resources.Asset, + Awaitable[resources.Asset] + ]]: + raise NotImplementedError() + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + Union[ + service.ListActionsResponse, + Awaitable[service.ListActionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + Union[ + service.ListTasksResponse, + Awaitable[service.ListTasksResponse] + ]]: + raise NotImplementedError() + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + Union[ + tasks.Task, + Awaitable[tasks.Task] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + Union[ + service.ListJobsResponse, + Awaitable[service.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + Union[ + service.RunTaskResponse, + Awaitable[service.RunTaskResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + Union[ + tasks.Job, + Awaitable[tasks.Job] + ]]: + raise NotImplementedError() + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + Union[ + service.ListEnvironmentsResponse, + Awaitable[service.ListEnvironmentsResponse] + ]]: + raise NotImplementedError() + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + Union[ + 
analyze.Environment, + Awaitable[analyze.Environment] + ]]: + raise NotImplementedError() + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + Union[ + service.ListSessionsResponse, + Awaitable[service.ListSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataplexServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py new file mode 100644 index 000000000000..743313b14313 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py @@ -0,0 +1,1323 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.dataplex_v1.types import tasks
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataplexService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of stringified values.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataplexService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DataplexServiceGrpcTransport(DataplexServiceTransport):
+    """gRPC backend transport for DataplexService.
+
+    Dataplex service provides data lakes as a service. The
+    primary resources offered by this service are Lakes, Zones and
+    Assets which collectively allow a data administrator to
+    organize, manage, secure and catalog data across their
+    organization located across cloud projects in a variety of
+    storage systems including Cloud Storage and BigQuery.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format.
It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataplex.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(
+                self._logged_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_lake(self) -> Callable[
+            [service.CreateLakeRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create lake method over gRPC.
+
+        Creates a lake resource.
+
+        Returns:
+            Callable[[~.CreateLakeRequest],
+            ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_lake' not in self._stubs:
+            self._stubs['create_lake'] = self._logged_channel.unary_unary(
+                '/google.cloud.dataplex.v1.DataplexService/CreateLake',
+                request_serializer=service.CreateLakeRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_lake']
+
+    @property
+    def update_lake(self) -> Callable[
+            [service.UpdateLakeRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update lake method over gRPC.
+
+        Updates a lake resource.
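+
+        For illustration only (assuming a constructed transport bound to the
+        name ``transport``; request fields are elided), the returned callable
+        can be invoked directly with a request message::
+
+            operation = transport.update_lake(service.UpdateLakeRequest())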
+ + Returns: + Callable[[~.UpdateLakeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_lake' not in self._stubs: + self._stubs['update_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateLake', + request_serializer=service.UpdateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_lake'] + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete lake method over gRPC. + + Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + Returns: + Callable[[~.DeleteLakeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_lake' not in self._stubs: + self._stubs['delete_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteLake', + request_serializer=service.DeleteLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_lake'] + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + service.ListLakesResponse]: + r"""Return a callable for the list lakes method over gRPC. + + Lists lake resources in a project and location. + + Returns: + Callable[[~.ListLakesRequest], + ~.ListLakesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lakes' not in self._stubs: + self._stubs['list_lakes'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakes', + request_serializer=service.ListLakesRequest.serialize, + response_deserializer=service.ListLakesResponse.deserialize, + ) + return self._stubs['list_lakes'] + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + resources.Lake]: + r"""Return a callable for the get lake method over gRPC. + + Retrieves a lake resource. + + Returns: + Callable[[~.GetLakeRequest], + ~.Lake]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_lake' not in self._stubs: + self._stubs['get_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetLake', + request_serializer=service.GetLakeRequest.serialize, + response_deserializer=resources.Lake.deserialize, + ) + return self._stubs['get_lake'] + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list lake actions method over gRPC. + + Lists action resources in a lake. 
+ + Returns: + Callable[[~.ListLakeActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lake_actions' not in self._stubs: + self._stubs['list_lake_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', + request_serializer=service.ListLakeActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_lake_actions'] + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the create zone method over gRPC. + + Creates a zone resource within a lake. + + Returns: + Callable[[~.CreateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_zone' not in self._stubs: + self._stubs['create_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateZone', + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_zone'] + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the update zone method over gRPC. + + Updates a zone resource. + + Returns: + Callable[[~.UpdateZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_zone' not in self._stubs: + self._stubs['update_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateZone', + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_zone'] + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. + + Returns: + Callable[[~.DeleteZoneRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_zone' not in self._stubs: + self._stubs['delete_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteZone', + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_zone'] + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + service.ListZonesResponse]: + r"""Return a callable for the list zones method over gRPC. 
+ + Lists zone resources in a lake. + + Returns: + Callable[[~.ListZonesRequest], + ~.ListZonesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zones' not in self._stubs: + self._stubs['list_zones'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZones', + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs['list_zones'] + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + resources.Zone]: + r"""Return a callable for the get zone method over gRPC. + + Retrieves a zone resource. + + Returns: + Callable[[~.GetZoneRequest], + ~.Zone]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_zone' not in self._stubs: + self._stubs['get_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetZone', + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs['get_zone'] + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list zone actions method over gRPC. + + Lists action resources in a zone. + + Returns: + Callable[[~.ListZoneActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zone_actions' not in self._stubs: + self._stubs['list_zone_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', + request_serializer=service.ListZoneActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_zone_actions'] + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the create asset method over gRPC. + + Creates an asset resource. + + Returns: + Callable[[~.CreateAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_asset' not in self._stubs: + self._stubs['create_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateAsset', + request_serializer=service.CreateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_asset'] + + @property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the update asset method over gRPC. + + Updates an asset resource. 
+ + Returns: + Callable[[~.UpdateAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_asset' not in self._stubs: + self._stubs['update_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', + request_serializer=service.UpdateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_asset'] + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete asset method over gRPC. + + Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + Returns: + Callable[[~.DeleteAssetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_asset' not in self._stubs: + self._stubs['delete_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', + request_serializer=service.DeleteAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_asset'] + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + service.ListAssetsResponse]: + r"""Return a callable for the list assets method over gRPC. + + Lists asset resources in a zone. + + Returns: + Callable[[~.ListAssetsRequest], + ~.ListAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssets', + request_serializer=service.ListAssetsRequest.serialize, + response_deserializer=service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + resources.Asset]: + r"""Return a callable for the get asset method over gRPC. + + Retrieves an asset resource. + + Returns: + Callable[[~.GetAssetRequest], + ~.Asset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_asset' not in self._stubs: + self._stubs['get_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetAsset', + request_serializer=service.GetAssetRequest.serialize, + response_deserializer=resources.Asset.deserialize, + ) + return self._stubs['get_asset'] + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + service.ListActionsResponse]: + r"""Return a callable for the list asset actions method over gRPC. 
+ + Lists action resources in an asset. + + Returns: + Callable[[~.ListAssetActionsRequest], + ~.ListActionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_asset_actions' not in self._stubs: + self._stubs['list_asset_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', + request_serializer=service.ListAssetActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_asset_actions'] + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the create task method over gRPC. + + Creates a task resource within a lake. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateTask', + request_serializer=service.CreateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_task'] + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the update task method over gRPC. + + Update the task resource. + + Returns: + Callable[[~.UpdateTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_task' not in self._stubs: + self._stubs['update_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateTask', + request_serializer=service.UpdateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_task'] + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete task method over gRPC. + + Delete the task resource. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteTask', + request_serializer=service.DeleteTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_task'] + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + service.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. 
+ + Lists tasks under the given lake. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListTasks', + request_serializer=service.ListTasksRequest.serialize, + response_deserializer=service.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + tasks.Task]: + r"""Return a callable for the get task method over gRPC. + + Get task resource. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetTask', + request_serializer=service.GetTaskRequest.serialize, + response_deserializer=tasks.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + service.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs under the given task. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListJobs', + request_serializer=service.ListJobsRequest.serialize, + response_deserializer=service.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + service.RunTaskResponse]: + r"""Return a callable for the run task method over gRPC. + + Run an on demand execution of a Task. + + Returns: + Callable[[~.RunTaskRequest], + ~.RunTaskResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/RunTask', + request_serializer=service.RunTaskRequest.serialize, + response_deserializer=service.RunTaskResponse.deserialize, + ) + return self._stubs['run_task'] + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + tasks.Job]: + r"""Return a callable for the get job method over gRPC. + + Get job resource. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetJob', + request_serializer=service.GetJobRequest.serialize, + response_deserializer=tasks.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel jobs running for the task resource. + + Returns: + Callable[[~.CancelJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CancelJob', + request_serializer=service.CancelJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_job'] + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_environment' not in self._stubs: + self._stubs['create_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_environment'] + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_environment' not in self._stubs: + self._stubs['update_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_environment'] + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. 
+ + Returns: + Callable[[~.DeleteEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_environment' not in self._stubs: + self._stubs['delete_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_environment'] + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + service.ListEnvironmentsResponse]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. + + Returns: + Callable[[~.ListEnvironmentsRequest], + ~.ListEnvironmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_environments' not in self._stubs: + self._stubs['list_environments'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs['list_environments'] + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + analyze.Environment]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. + + Returns: + Callable[[~.GetEnvironmentRequest], + ~.Environment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_environment' not in self._stubs: + self._stubs['get_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs['get_environment'] + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + service.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
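+        # Illustrative sketch only (``transport`` and the request values are
+        # hypothetical): the property returns a plain callable that can be
+        # invoked directly with a request message, e.g.
+        #
+        #   stub = transport.list_sessions
+        #   response = stub(service.ListSessionsRequest(
+        #       parent="projects/p/locations/l/lakes/lake/environments/env"))
+        #
+        # The stub is created lazily on first access and cached in
+        # ``self._stubs``, so repeated property reads reuse the same stub.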
+ if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListSessions', + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'DataplexServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..bc8ec6359a6d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py
@@ -0,0 +1,1669 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.dataplex_v1.types import tasks
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataplexServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.dataplex.v1.DataplexService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata into a plain dict of
+            # ``str`` key/value pairs for logging.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
"serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DataplexServiceGrpcAsyncIOTransport(DataplexServiceTransport): + """gRPC AsyncIO backend transport for DataplexService. + + Dataplex service provides data lakes as a service. The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
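+            # Illustrative sketch (hypothetical variable names): a caller may
+            # build the channel up front and inject it, e.g.
+            #
+            #   channel = DataplexServiceGrpcAsyncIOTransport.create_channel(
+            #       'dataplex.googleapis.com', credentials=creds)
+            #   transport = DataplexServiceGrpcAsyncIOTransport(channel=channel)
+            #
+            # in which case the channel is adopted as-is below and the
+            # credential arguments are not used.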
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create lake method over gRPC. + + Creates a lake resource. + + Returns: + Callable[[~.CreateLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
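+        # Illustrative async usage (hypothetical names): awaiting the returned
+        # callable yields the raw long-running operation message, e.g.
+        #
+        #   operation = await transport.create_lake(
+        #       service.CreateLakeRequest(parent=parent, lake_id='my-lake'))
+        #
+        # which resolves to a ``google.longrunning.Operation`` that can then
+        # be polled to completion via the ``operations_client`` property.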
+ if 'create_lake' not in self._stubs: + self._stubs['create_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateLake', + request_serializer=service.CreateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_lake'] + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update lake method over gRPC. + + Updates a lake resource. + + Returns: + Callable[[~.UpdateLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_lake' not in self._stubs: + self._stubs['update_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateLake', + request_serializer=service.UpdateLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_lake'] + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete lake method over gRPC. + + Deletes a lake resource. All zones within the lake + must be deleted before the lake can be deleted. + + Returns: + Callable[[~.DeleteLakeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_lake' not in self._stubs: + self._stubs['delete_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteLake', + request_serializer=service.DeleteLakeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_lake'] + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + Awaitable[service.ListLakesResponse]]: + r"""Return a callable for the list lakes method over gRPC. + + Lists lake resources in a project and location. + + Returns: + Callable[[~.ListLakesRequest], + Awaitable[~.ListLakesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lakes' not in self._stubs: + self._stubs['list_lakes'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakes', + request_serializer=service.ListLakesRequest.serialize, + response_deserializer=service.ListLakesResponse.deserialize, + ) + return self._stubs['list_lakes'] + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + Awaitable[resources.Lake]]: + r"""Return a callable for the get lake method over gRPC. + + Retrieves a lake resource. + + Returns: + Callable[[~.GetLakeRequest], + Awaitable[~.Lake]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_lake' not in self._stubs: + self._stubs['get_lake'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetLake', + request_serializer=service.GetLakeRequest.serialize, + response_deserializer=resources.Lake.deserialize, + ) + return self._stubs['get_lake'] + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list lake actions method over gRPC. + + Lists action resources in a lake. + + Returns: + Callable[[~.ListLakeActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_lake_actions' not in self._stubs: + self._stubs['list_lake_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', + request_serializer=service.ListLakeActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_lake_actions'] + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create zone method over gRPC. + + Creates a zone resource within a lake. + + Returns: + Callable[[~.CreateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_zone' not in self._stubs: + self._stubs['create_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateZone', + request_serializer=service.CreateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_zone'] + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update zone method over gRPC. + + Updates a zone resource. + + Returns: + Callable[[~.UpdateZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_zone' not in self._stubs: + self._stubs['update_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateZone', + request_serializer=service.UpdateZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_zone'] + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete zone method over gRPC. + + Deletes a zone resource. All assets within a zone + must be deleted before the zone can be deleted. 
+ + Returns: + Callable[[~.DeleteZoneRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_zone' not in self._stubs: + self._stubs['delete_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteZone', + request_serializer=service.DeleteZoneRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_zone'] + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + Awaitable[service.ListZonesResponse]]: + r"""Return a callable for the list zones method over gRPC. + + Lists zone resources in a lake. + + Returns: + Callable[[~.ListZonesRequest], + Awaitable[~.ListZonesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zones' not in self._stubs: + self._stubs['list_zones'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZones', + request_serializer=service.ListZonesRequest.serialize, + response_deserializer=service.ListZonesResponse.deserialize, + ) + return self._stubs['list_zones'] + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + Awaitable[resources.Zone]]: + r"""Return a callable for the get zone method over gRPC. + + Retrieves a zone resource. + + Returns: + Callable[[~.GetZoneRequest], + Awaitable[~.Zone]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_zone' not in self._stubs: + self._stubs['get_zone'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetZone', + request_serializer=service.GetZoneRequest.serialize, + response_deserializer=resources.Zone.deserialize, + ) + return self._stubs['get_zone'] + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list zone actions method over gRPC. + + Lists action resources in a zone. + + Returns: + Callable[[~.ListZoneActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_zone_actions' not in self._stubs: + self._stubs['list_zone_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', + request_serializer=service.ListZoneActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_zone_actions'] + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create asset method over gRPC. 
+ + Creates an asset resource. + + Returns: + Callable[[~.CreateAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_asset' not in self._stubs: + self._stubs['create_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateAsset', + request_serializer=service.CreateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_asset'] + + @property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update asset method over gRPC. + + Updates an asset resource. + + Returns: + Callable[[~.UpdateAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_asset' not in self._stubs: + self._stubs['update_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', + request_serializer=service.UpdateAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_asset'] + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete asset method over gRPC. + + Deletes an asset resource. The referenced storage + resource is detached (default) or deleted based on the + associated Lifecycle policy. + + Returns: + Callable[[~.DeleteAssetRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_asset' not in self._stubs: + self._stubs['delete_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', + request_serializer=service.DeleteAssetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_asset'] + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + Awaitable[service.ListAssetsResponse]]: + r"""Return a callable for the list assets method over gRPC. + + Lists asset resources in a zone. + + Returns: + Callable[[~.ListAssetsRequest], + Awaitable[~.ListAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
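+        # Illustrative sketch (hypothetical names): list RPCs return one page
+        # at a time, so manual pagination over this raw stub looks like
+        #
+        #   request = service.ListAssetsRequest(parent=zone_name)
+        #   while True:
+        #       page = await transport.list_assets(request)
+        #       # ... consume page.assets ...
+        #       if not page.next_page_token:
+        #           break
+        #       request.page_token = page.next_page_token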
+ if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssets', + request_serializer=service.ListAssetsRequest.serialize, + response_deserializer=service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + Awaitable[resources.Asset]]: + r"""Return a callable for the get asset method over gRPC. + + Retrieves an asset resource. + + Returns: + Callable[[~.GetAssetRequest], + Awaitable[~.Asset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_asset' not in self._stubs: + self._stubs['get_asset'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetAsset', + request_serializer=service.GetAssetRequest.serialize, + response_deserializer=resources.Asset.deserialize, + ) + return self._stubs['get_asset'] + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + Awaitable[service.ListActionsResponse]]: + r"""Return a callable for the list asset actions method over gRPC. + + Lists action resources in an asset. + + Returns: + Callable[[~.ListAssetActionsRequest], + Awaitable[~.ListActionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_asset_actions' not in self._stubs: + self._stubs['list_asset_actions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', + request_serializer=service.ListAssetActionsRequest.serialize, + response_deserializer=service.ListActionsResponse.deserialize, + ) + return self._stubs['list_asset_actions'] + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task resource within a lake. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateTask', + request_serializer=service.CreateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_task'] + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update task method over gRPC. + + Update the task resource. + + Returns: + Callable[[~.UpdateTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_task' not in self._stubs: + self._stubs['update_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateTask', + request_serializer=service.UpdateTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_task'] + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete task method over gRPC. + + Delete the task resource. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteTask', + request_serializer=service.DeleteTaskRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_task'] + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + Awaitable[service.ListTasksResponse]]: + r"""Return a callable for the list tasks method over gRPC. + + Lists tasks under the given lake. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListTasks', + request_serializer=service.ListTasksRequest.serialize, + response_deserializer=service.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + Awaitable[tasks.Task]]: + r"""Return a callable for the get task method over gRPC. + + Get task resource. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetTask', + request_serializer=service.GetTaskRequest.serialize, + response_deserializer=tasks.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + Awaitable[service.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists Jobs under the given task. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListJobs', + request_serializer=service.ListJobsRequest.serialize, + response_deserializer=service.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + Awaitable[service.RunTaskResponse]]: + r"""Return a callable for the run task method over gRPC. + + Run an on demand execution of a Task. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.RunTaskResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/RunTask', + request_serializer=service.RunTaskRequest.serialize, + response_deserializer=service.RunTaskResponse.deserialize, + ) + return self._stubs['run_task'] + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + Awaitable[tasks.Job]]: + r"""Return a callable for the get job method over gRPC. + + Get job resource. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetJob', + request_serializer=service.GetJobRequest.serialize, + response_deserializer=tasks.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel jobs running for the task resource. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_job' not in self._stubs: + self._stubs['cancel_job'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CancelJob', + request_serializer=service.CancelJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_job'] + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_environment' not in self._stubs: + self._stubs['create_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_environment'] + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_environment' not in self._stubs: + self._stubs['update_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_environment'] + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_environment' not in self._stubs: + self._stubs['delete_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_environment'] + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + Awaitable[service.ListEnvironmentsResponse]]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. + + Returns: + Callable[[~.ListEnvironmentsRequest], + Awaitable[~.ListEnvironmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
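+        # A hedged illustration of the caching contract (not part of the
+        # generated surface; names are illustrative): repeated attribute
+        # access returns the same cached multicallable, so stub setup cost
+        # is paid only once.
+        #
+        #     first = transport.list_environments
+        #     second = transport.list_environments
+        #     assert first is second  # both come from self._stubs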
+ if 'list_environments' not in self._stubs: + self._stubs['list_environments'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs['list_environments'] + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + Awaitable[analyze.Environment]]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. + + Returns: + Callable[[~.GetEnvironmentRequest], + Awaitable[~.Environment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_environment' not in self._stubs: + self._stubs['get_environment'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs['get_environment'] + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + Awaitable[service.ListSessionsResponse]]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
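+        # A minimal usage sketch, assuming an `environment_name` resource
+        # string (illustrative only, not part of the generated surface):
+        # the returned callable is awaited directly with a request message.
+        #
+        #     request = service.ListSessionsRequest(parent=environment_name)
+        #     response = await transport.list_sessions(request)
+        #     for session in response.sessions:
+        #         print(session.name)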
+ if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.DataplexService/ListSessions', + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_lake: self._wrap_method( + self.create_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.update_lake: self._wrap_method( + self.update_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_lake: self._wrap_method( + self.delete_lake, + default_timeout=60.0, + client_info=client_info, + ), + self.list_lakes: self._wrap_method( + self.list_lakes, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_lake: self._wrap_method( + self.get_lake, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_lake_actions: self._wrap_method( + self.list_lake_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_zone: self._wrap_method( + self.create_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.update_zone: self._wrap_method( + self.update_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_zone: self._wrap_method( + self.delete_zone, + default_timeout=60.0, + client_info=client_info, + ), + self.list_zones: self._wrap_method( + self.list_zones, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_zone: self._wrap_method( + self.get_zone, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_zone_actions: self._wrap_method( + self.list_zone_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_asset: self._wrap_method( + self.create_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.update_asset: self._wrap_method( + self.update_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_asset: self._wrap_method( + self.delete_asset, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: self._wrap_method( + self.list_assets, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
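+                    # Only transient UNAVAILABLE errors are treated as retryable
+                    # by the predicate below; any other status code surfaces to
+                    # the caller immediately.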
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_asset: self._wrap_method( + self.get_asset, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_asset_actions: self._wrap_method( + self.list_asset_actions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_task: self._wrap_method( + self.create_task, + default_timeout=60.0, + client_info=client_info, + ), + self.update_task: self._wrap_method( + self.update_task, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_task: self._wrap_method( + self.delete_task, + default_timeout=60.0, + client_info=client_info, + ), + self.list_tasks: self._wrap_method( + self.list_tasks, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_task: self._wrap_method( + self.get_task, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: self._wrap_method( + self.list_jobs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.run_task: self._wrap_method( + self.run_task, + default_timeout=None, + client_info=client_info, + ), + self.get_job: self._wrap_method( + self.get_job, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_job: self._wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_environment: self._wrap_method( + self.create_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.update_environment: self._wrap_method( + self.update_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_environment: self._wrap_method( + self.delete_environment, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: self._wrap_method( + self.list_environments, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_environment: self._wrap_method( + self.get_environment, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), 
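+            # For the AsyncRetry entries above: backoff sleeps start at 1.0s
+            # and grow by a factor of 1.3 (roughly 1.0s, 1.3s, 1.69s, ...),
+            # capped at 10.0s per attempt, until the 60.0s deadline elapses.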
+ self.list_sessions: self._wrap_method( + self.list_sessions, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'DataplexServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py new file mode 100644 index 000000000000..cb83f1bbf4d0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py @@ -0,0 +1,6707 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseDataplexServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataplexServiceRestInterceptor: + """Interceptor for DataplexService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataplexServiceRestTransport. + + .. 
code-block:: python
+        class MyCustomDataplexServiceInterceptor(DataplexServiceRestInterceptor):
+            def pre_cancel_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_create_asset(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_asset(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_environment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_environment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_lake(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_lake(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_zone(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_zone(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_asset(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_asset(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_environment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_environment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_lake(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_lake(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_zone(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_zone(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_asset(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_asset(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_environment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_environment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_lake(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_lake(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
{request}") + return request, metadata + + def post_get_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_zone(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_zone(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_asset_actions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_asset_actions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_assets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_assets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_environments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_environments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_lake_actions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_lake_actions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_lakes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_lakes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sessions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sessions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tasks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tasks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_zone_actions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_zone_actions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_zones(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_zones(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_asset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_asset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_environment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_environment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_lake(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + 
+            def post_update_lake(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_zone(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_zone(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = DataplexServiceRestTransport(interceptor=MyCustomDataplexServiceInterceptor())
+        client = DataplexServiceClient(transport=transport)
+
+
+    """
+    def pre_cancel_job(self, request: service.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for cancel_job
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DataplexService server.
+        """
+        return request, metadata
+
+    def pre_create_asset(self, request: service.CreateAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_asset
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DataplexService server.
+        """
+        return request, metadata
+
+    def post_create_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+        """Post-rpc interceptor for create_asset
+
+        DEPRECATED. Please use the `post_create_asset_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the DataplexService server but before
+        it is returned to user code. This `post_create_asset` interceptor runs
+        before the `post_create_asset_with_metadata` interceptor.
+        """
+        return response
+
+    def post_create_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Post-rpc interceptor for create_asset
+
+        Override in a subclass to read or manipulate the response or metadata after it
+        is returned by the DataplexService server but before it is returned to user code.
+
+        We recommend only using this `post_create_asset_with_metadata`
+        interceptor in new development instead of the `post_create_asset` interceptor.
+        When both interceptors are used, this `post_create_asset_with_metadata` interceptor runs after the
+        `post_create_asset` interceptor. The (possibly modified) response returned by
+        `post_create_asset` will be passed to
+        `post_create_asset_with_metadata`.
+        """
+        return response, metadata
+
+    def pre_create_environment(self, request: service.CreateEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_environment
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DataplexService server.
+        """
+        return request, metadata
+
+    def post_create_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+        """Post-rpc interceptor for create_environment
+
+        DEPRECATED. Please use the `post_create_environment_with_metadata`
+        interceptor instead.
+ + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_create_environment` interceptor runs + before the `post_create_environment_with_metadata` interceptor. + """ + return response + + def post_create_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_create_environment_with_metadata` + interceptor in new development instead of the `post_create_environment` interceptor. + When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the + `post_create_environment` interceptor. The (possibly modified) response returned by + `post_create_environment` will be passed to + `post_create_environment_with_metadata`. + """ + return response, metadata + + def pre_create_lake(self, request: service.CreateLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_lake + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_create_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_lake + + DEPRECATED. Please use the `post_create_lake_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_create_lake` interceptor runs + before the `post_create_lake_with_metadata` interceptor. + """ + return response + + def post_create_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_lake + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_create_lake_with_metadata` + interceptor in new development instead of the `post_create_lake` interceptor. + When both interceptors are used, this `post_create_lake_with_metadata` interceptor runs after the + `post_create_lake` interceptor. The (possibly modified) response returned by + `post_create_lake` will be passed to + `post_create_lake_with_metadata`. + """ + return response, metadata + + def pre_create_task(self, request: service.CreateTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_create_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_task + + DEPRECATED. 
Please use the `post_create_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_create_task` interceptor runs + before the `post_create_task_with_metadata` interceptor. + """ + return response + + def post_create_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_create_task_with_metadata` + interceptor in new development instead of the `post_create_task` interceptor. + When both interceptors are used, this `post_create_task_with_metadata` interceptor runs after the + `post_create_task` interceptor. The (possibly modified) response returned by + `post_create_task` will be passed to + `post_create_task_with_metadata`. + """ + return response, metadata + + def pre_create_zone(self, request: service.CreateZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_create_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_zone + + DEPRECATED. Please use the `post_create_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_create_zone` interceptor runs + before the `post_create_zone_with_metadata` interceptor. + """ + return response + + def post_create_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_create_zone_with_metadata` + interceptor in new development instead of the `post_create_zone` interceptor. + When both interceptors are used, this `post_create_zone_with_metadata` interceptor runs after the + `post_create_zone` interceptor. The (possibly modified) response returned by + `post_create_zone` will be passed to + `post_create_zone_with_metadata`. + """ + return response, metadata + + def pre_delete_asset(self, request: service.DeleteAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_asset + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_delete_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_asset + + DEPRECATED. 
Please use the `post_delete_asset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_delete_asset` interceptor runs + before the `post_delete_asset_with_metadata` interceptor. + """ + return response + + def post_delete_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_asset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_delete_asset_with_metadata` + interceptor in new development instead of the `post_delete_asset` interceptor. + When both interceptors are used, this `post_delete_asset_with_metadata` interceptor runs after the + `post_delete_asset` interceptor. The (possibly modified) response returned by + `post_delete_asset` will be passed to + `post_delete_asset_with_metadata`. + """ + return response, metadata + + def pre_delete_environment(self, request: service.DeleteEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_delete_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_environment + + DEPRECATED. Please use the `post_delete_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_delete_environment` interceptor runs + before the `post_delete_environment_with_metadata` interceptor. + """ + return response + + def post_delete_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_delete_environment_with_metadata` + interceptor in new development instead of the `post_delete_environment` interceptor. + When both interceptors are used, this `post_delete_environment_with_metadata` interceptor runs after the + `post_delete_environment` interceptor. The (possibly modified) response returned by + `post_delete_environment` will be passed to + `post_delete_environment_with_metadata`. + """ + return response, metadata + + def pre_delete_lake(self, request: service.DeleteLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_lake + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. 
+ """ + return request, metadata + + def post_delete_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_lake + + DEPRECATED. Please use the `post_delete_lake_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_delete_lake` interceptor runs + before the `post_delete_lake_with_metadata` interceptor. + """ + return response + + def post_delete_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_lake + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_delete_lake_with_metadata` + interceptor in new development instead of the `post_delete_lake` interceptor. + When both interceptors are used, this `post_delete_lake_with_metadata` interceptor runs after the + `post_delete_lake` interceptor. The (possibly modified) response returned by + `post_delete_lake` will be passed to + `post_delete_lake_with_metadata`. + """ + return response, metadata + + def pre_delete_task(self, request: service.DeleteTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_delete_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_task + + DEPRECATED. Please use the `post_delete_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_delete_task` interceptor runs + before the `post_delete_task_with_metadata` interceptor. + """ + return response + + def post_delete_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_delete_task_with_metadata` + interceptor in new development instead of the `post_delete_task` interceptor. + When both interceptors are used, this `post_delete_task_with_metadata` interceptor runs after the + `post_delete_task` interceptor. The (possibly modified) response returned by + `post_delete_task` will be passed to + `post_delete_task_with_metadata`. + """ + return response, metadata + + def pre_delete_zone(self, request: service.DeleteZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. 
+ """ + return request, metadata + + def post_delete_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_zone + + DEPRECATED. Please use the `post_delete_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_delete_zone` interceptor runs + before the `post_delete_zone_with_metadata` interceptor. + """ + return response + + def post_delete_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_delete_zone_with_metadata` + interceptor in new development instead of the `post_delete_zone` interceptor. + When both interceptors are used, this `post_delete_zone_with_metadata` interceptor runs after the + `post_delete_zone` interceptor. The (possibly modified) response returned by + `post_delete_zone` will be passed to + `post_delete_zone_with_metadata`. + """ + return response, metadata + + def pre_get_asset(self, request: service.GetAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_asset + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_asset(self, response: resources.Asset) -> resources.Asset: + """Post-rpc interceptor for get_asset + + DEPRECATED. Please use the `post_get_asset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_get_asset` interceptor runs + before the `post_get_asset_with_metadata` interceptor. + """ + return response + + def post_get_asset_with_metadata(self, response: resources.Asset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Asset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_asset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_asset_with_metadata` + interceptor in new development instead of the `post_get_asset` interceptor. + When both interceptors are used, this `post_get_asset_with_metadata` interceptor runs after the + `post_get_asset` interceptor. The (possibly modified) response returned by + `post_get_asset` will be passed to + `post_get_asset_with_metadata`. + """ + return response, metadata + + def pre_get_environment(self, request: service.GetEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. 
+ """ + return request, metadata + + def post_get_environment(self, response: analyze.Environment) -> analyze.Environment: + """Post-rpc interceptor for get_environment + + DEPRECATED. Please use the `post_get_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_get_environment` interceptor runs + before the `post_get_environment_with_metadata` interceptor. + """ + return response + + def post_get_environment_with_metadata(self, response: analyze.Environment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_environment_with_metadata` + interceptor in new development instead of the `post_get_environment` interceptor. + When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the + `post_get_environment` interceptor. The (possibly modified) response returned by + `post_get_environment` will be passed to + `post_get_environment_with_metadata`. + """ + return response, metadata + + def pre_get_job(self, request: service.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_job(self, response: tasks.Job) -> tasks.Job: + """Post-rpc interceptor for get_job + + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. + """ + return response + + def post_get_job_with_metadata(self, response: tasks.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[tasks.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + + def pre_get_lake(self, request: service.GetLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_lake + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_lake(self, response: resources.Lake) -> resources.Lake: + """Post-rpc interceptor for get_lake + + DEPRECATED. 
Please use the `post_get_lake_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_get_lake` interceptor runs + before the `post_get_lake_with_metadata` interceptor. + """ + return response + + def post_get_lake_with_metadata(self, response: resources.Lake, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Lake, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_lake + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_lake_with_metadata` + interceptor in new development instead of the `post_get_lake` interceptor. + When both interceptors are used, this `post_get_lake_with_metadata` interceptor runs after the + `post_get_lake` interceptor. The (possibly modified) response returned by + `post_get_lake` will be passed to + `post_get_lake_with_metadata`. + """ + return response, metadata + + def pre_get_task(self, request: service.GetTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_task(self, response: tasks.Task) -> tasks.Task: + """Post-rpc interceptor for get_task + + DEPRECATED. Please use the `post_get_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_get_task` interceptor runs + before the `post_get_task_with_metadata` interceptor. + """ + return response + + def post_get_task_with_metadata(self, response: tasks.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[tasks.Task, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_task_with_metadata` + interceptor in new development instead of the `post_get_task` interceptor. + When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the + `post_get_task` interceptor. The (possibly modified) response returned by + `post_get_task` will be passed to + `post_get_task_with_metadata`. + """ + return response, metadata + + def pre_get_zone(self, request: service.GetZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_zone(self, response: resources.Zone) -> resources.Zone: + """Post-rpc interceptor for get_zone + + DEPRECATED. Please use the `post_get_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. 
This `post_get_zone` interceptor runs + before the `post_get_zone_with_metadata` interceptor. + """ + return response + + def post_get_zone_with_metadata(self, response: resources.Zone, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Zone, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_get_zone_with_metadata` + interceptor in new development instead of the `post_get_zone` interceptor. + When both interceptors are used, this `post_get_zone_with_metadata` interceptor runs after the + `post_get_zone` interceptor. The (possibly modified) response returned by + `post_get_zone` will be passed to + `post_get_zone_with_metadata`. + """ + return response, metadata + + def pre_list_asset_actions(self, request: service.ListAssetActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_asset_actions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_asset_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: + """Post-rpc interceptor for list_asset_actions + + DEPRECATED. Please use the `post_list_asset_actions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_asset_actions` interceptor runs + before the `post_list_asset_actions_with_metadata` interceptor. + """ + return response + + def post_list_asset_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_asset_actions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_asset_actions_with_metadata` + interceptor in new development instead of the `post_list_asset_actions` interceptor. + When both interceptors are used, this `post_list_asset_actions_with_metadata` interceptor runs after the + `post_list_asset_actions` interceptor. The (possibly modified) response returned by + `post_list_asset_actions` will be passed to + `post_list_asset_actions_with_metadata`. + """ + return response, metadata + + def pre_list_assets(self, request: service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_assets(self, response: service.ListAssetsResponse) -> service.ListAssetsResponse: + """Post-rpc interceptor for list_assets + + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. + """ + return response + + def post_list_assets_with_metadata(self, response: service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. + """ + return response, metadata + + def pre_list_environments(self, request: service.ListEnvironmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListEnvironmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_environments + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_environments(self, response: service.ListEnvironmentsResponse) -> service.ListEnvironmentsResponse: + """Post-rpc interceptor for list_environments + + DEPRECATED. Please use the `post_list_environments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_environments` interceptor runs + before the `post_list_environments_with_metadata` interceptor. + """ + return response + + def post_list_environments_with_metadata(self, response: service.ListEnvironmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_environments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_environments_with_metadata` + interceptor in new development instead of the `post_list_environments` interceptor. + When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the + `post_list_environments` interceptor. The (possibly modified) response returned by + `post_list_environments` will be passed to + `post_list_environments_with_metadata`. + """ + return response, metadata + + def pre_list_jobs(self, request: service.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_jobs(self, response: service.ListJobsResponse) -> service.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + DEPRECATED. 
Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. + """ + return response + + def post_list_jobs_with_metadata(self, response: service.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + + def pre_list_lake_actions(self, request: service.ListLakeActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakeActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_lake_actions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_lake_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: + """Post-rpc interceptor for list_lake_actions + + DEPRECATED. Please use the `post_list_lake_actions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_lake_actions` interceptor runs + before the `post_list_lake_actions_with_metadata` interceptor. + """ + return response + + def post_list_lake_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_lake_actions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_lake_actions_with_metadata` + interceptor in new development instead of the `post_list_lake_actions` interceptor. + When both interceptors are used, this `post_list_lake_actions_with_metadata` interceptor runs after the + `post_list_lake_actions` interceptor. The (possibly modified) response returned by + `post_list_lake_actions` will be passed to + `post_list_lake_actions_with_metadata`. + """ + return response, metadata + + def pre_list_lakes(self, request: service.ListLakesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_lakes + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. 
+ """ + return request, metadata + + def post_list_lakes(self, response: service.ListLakesResponse) -> service.ListLakesResponse: + """Post-rpc interceptor for list_lakes + + DEPRECATED. Please use the `post_list_lakes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_lakes` interceptor runs + before the `post_list_lakes_with_metadata` interceptor. + """ + return response + + def post_list_lakes_with_metadata(self, response: service.ListLakesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_lakes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_lakes_with_metadata` + interceptor in new development instead of the `post_list_lakes` interceptor. + When both interceptors are used, this `post_list_lakes_with_metadata` interceptor runs after the + `post_list_lakes` interceptor. The (possibly modified) response returned by + `post_list_lakes` will be passed to + `post_list_lakes_with_metadata`. + """ + return response, metadata + + def pre_list_sessions(self, request: service.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_sessions(self, response: service.ListSessionsResponse) -> service.ListSessionsResponse: + """Post-rpc interceptor for list_sessions + + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. + """ + return response + + def post_list_sessions_with_metadata(self, response: service.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + + def pre_list_tasks(self, request: service.ListTasksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. 
+ """ + return request, metadata + + def post_list_tasks(self, response: service.ListTasksResponse) -> service.ListTasksResponse: + """Post-rpc interceptor for list_tasks + + DEPRECATED. Please use the `post_list_tasks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_tasks` interceptor runs + before the `post_list_tasks_with_metadata` interceptor. + """ + return response + + def post_list_tasks_with_metadata(self, response: service.ListTasksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tasks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_tasks_with_metadata` + interceptor in new development instead of the `post_list_tasks` interceptor. + When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the + `post_list_tasks` interceptor. The (possibly modified) response returned by + `post_list_tasks` will be passed to + `post_list_tasks_with_metadata`. + """ + return response, metadata + + def pre_list_zone_actions(self, request: service.ListZoneActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZoneActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_zone_actions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_zone_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: + """Post-rpc interceptor for list_zone_actions + + DEPRECATED. Please use the `post_list_zone_actions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_zone_actions` interceptor runs + before the `post_list_zone_actions_with_metadata` interceptor. + """ + return response + + def post_list_zone_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_zone_actions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_zone_actions_with_metadata` + interceptor in new development instead of the `post_list_zone_actions` interceptor. + When both interceptors are used, this `post_list_zone_actions_with_metadata` interceptor runs after the + `post_list_zone_actions` interceptor. The (possibly modified) response returned by + `post_list_zone_actions` will be passed to + `post_list_zone_actions_with_metadata`. 
+ """ + return response, metadata + + def pre_list_zones(self, request: service.ListZonesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZonesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_zones + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_zones(self, response: service.ListZonesResponse) -> service.ListZonesResponse: + """Post-rpc interceptor for list_zones + + DEPRECATED. Please use the `post_list_zones_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_list_zones` interceptor runs + before the `post_list_zones_with_metadata` interceptor. + """ + return response + + def post_list_zones_with_metadata(self, response: service.ListZonesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZonesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_zones + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_list_zones_with_metadata` + interceptor in new development instead of the `post_list_zones` interceptor. + When both interceptors are used, this `post_list_zones_with_metadata` interceptor runs after the + `post_list_zones` interceptor. The (possibly modified) response returned by + `post_list_zones` will be passed to + `post_list_zones_with_metadata`. + """ + return response, metadata + + def pre_run_task(self, request: service.RunTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.RunTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for run_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_run_task(self, response: service.RunTaskResponse) -> service.RunTaskResponse: + """Post-rpc interceptor for run_task + + DEPRECATED. Please use the `post_run_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_run_task` interceptor runs + before the `post_run_task_with_metadata` interceptor. + """ + return response + + def post_run_task_with_metadata(self, response: service.RunTaskResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.RunTaskResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_run_task_with_metadata` + interceptor in new development instead of the `post_run_task` interceptor. + When both interceptors are used, this `post_run_task_with_metadata` interceptor runs after the + `post_run_task` interceptor. The (possibly modified) response returned by + `post_run_task` will be passed to + `post_run_task_with_metadata`. 
+ """ + return response, metadata + + def pre_update_asset(self, request: service.UpdateAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_asset + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_update_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_asset + + DEPRECATED. Please use the `post_update_asset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_update_asset` interceptor runs + before the `post_update_asset_with_metadata` interceptor. + """ + return response + + def post_update_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_asset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_update_asset_with_metadata` + interceptor in new development instead of the `post_update_asset` interceptor. + When both interceptors are used, this `post_update_asset_with_metadata` interceptor runs after the + `post_update_asset` interceptor. The (possibly modified) response returned by + `post_update_asset` will be passed to + `post_update_asset_with_metadata`. + """ + return response, metadata + + def pre_update_environment(self, request: service.UpdateEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_update_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_environment + + DEPRECATED. Please use the `post_update_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_update_environment` interceptor runs + before the `post_update_environment_with_metadata` interceptor. + """ + return response + + def post_update_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_update_environment_with_metadata` + interceptor in new development instead of the `post_update_environment` interceptor. + When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the + `post_update_environment` interceptor. 
The (possibly modified) response returned by + `post_update_environment` will be passed to + `post_update_environment_with_metadata`. + """ + return response, metadata + + def pre_update_lake(self, request: service.UpdateLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_lake + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_update_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_lake + + DEPRECATED. Please use the `post_update_lake_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_update_lake` interceptor runs + before the `post_update_lake_with_metadata` interceptor. + """ + return response + + def post_update_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_lake + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_update_lake_with_metadata` + interceptor in new development instead of the `post_update_lake` interceptor. + When both interceptors are used, this `post_update_lake_with_metadata` interceptor runs after the + `post_update_lake` interceptor. The (possibly modified) response returned by + `post_update_lake` will be passed to + `post_update_lake_with_metadata`. + """ + return response, metadata + + def pre_update_task(self, request: service.UpdateTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_update_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_task + + DEPRECATED. Please use the `post_update_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_update_task` interceptor runs + before the `post_update_task_with_metadata` interceptor. + """ + return response + + def post_update_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_update_task_with_metadata` + interceptor in new development instead of the `post_update_task` interceptor. + When both interceptors are used, this `post_update_task_with_metadata` interceptor runs after the + `post_update_task` interceptor. 
The (possibly modified) response returned by + `post_update_task` will be passed to + `post_update_task_with_metadata`. + """ + return response, metadata + + def pre_update_zone(self, request: service.UpdateZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_zone + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_update_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_zone + + DEPRECATED. Please use the `post_update_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. This `post_update_zone` interceptor runs + before the `post_update_zone_with_metadata` interceptor. + """ + return response + + def post_update_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataplexService server but before it is returned to user code. + + We recommend only using this `post_update_zone_with_metadata` + interceptor in new development instead of the `post_update_zone` interceptor. + When both interceptors are used, this `post_update_zone_with_metadata` interceptor runs after the + `post_update_zone` interceptor. The (possibly modified) response returned by + `post_update_zone` will be passed to + `post_update_zone_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. 
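+ + Note that, unlike the RPC-specific hooks above, this mixin hook has no ``_with_metadata`` variant; only the response itself can be read or replaced here.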
+ """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataplexService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataplexService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataplexServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataplexServiceRestInterceptor + + +class DataplexServiceRestTransport(_BaseDataplexServiceRestTransport): + """REST backend synchronous transport for DataplexService. + + Dataplex service provides data lakes as a service. 
The + primary resources offered by this service are Lakes, Zones and + Assets which collectively allow a data administrator to + organize, manage, secure and catalog data across their + organization located across cloud projects in a variety of + storage systems including Cloud Storage and BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DataplexServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
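+ # Illustration only, kept as a comment: a caller can construct this + # transport directly with a custom interceptor and hand it to the + # client. The interceptor subclass named here is hypothetical. + # + # transport = DataplexServiceRestTransport( + # interceptor=AuditInterceptor(), + # ) + # client = DataplexServiceClient(transport=transport)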
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DataplexServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CancelJob(_BaseDataplexServiceRestTransport._BaseCancelJob, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CancelJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CancelJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the cancel job method over HTTP. + + Args: + request (~.service.CancelJobRequest): + The request object. Cancel task jobs. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCancelJob._get_http_options() + + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CancelJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CancelJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateAsset(_BaseDataplexServiceRestTransport._BaseCreateAsset, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CreateAsset") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CreateAssetRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create asset method over HTTP. + + Args: + request (~.service.CreateAssetRequest): + The request object. Create asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_http_options() + + request, metadata = self._interceptor.pre_create_asset(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateAsset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateAsset", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CreateAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_asset_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_asset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateAsset", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateEnvironment(_BaseDataplexServiceRestTransport._BaseCreateEnvironment, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CreateEnvironment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CreateEnvironmentRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create environment method over HTTP. + + Args: + request (~.service.CreateEnvironmentRequest): + The request object. Create environment request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_http_options() + + request, metadata = self._interceptor.pre_create_environment(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateEnvironment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateEnvironment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CreateEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
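+ # ``from_http_response`` maps the HTTP status and the JSON error body to + # the matching ``GoogleAPICallError`` subclass (for example, a 404 + # becomes ``NotFound`` and a 403 becomes ``PermissionDenied``).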
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_environment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_environment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateEnvironment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateLake(_BaseDataplexServiceRestTransport._BaseCreateLake, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CreateLake") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CreateLakeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create lake method over HTTP. + + Args: + request (~.service.CreateLakeRequest): + The request object. Create lake request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
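+ + For illustration (``transport`` and ``resp`` here are assumed names for an instance of this class and the parsed response), the raw operation can also be polled through the ``operations_client`` property defined above:: + + op = transport.operations_client.get_operation(name=resp.name) + # op.done reports whether the long-running create has finished.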
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCreateLake._get_http_options() + + request, metadata = self._interceptor.pre_create_lake(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateLake._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCreateLake._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCreateLake._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateLake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateLake", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CreateLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_lake(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_lake_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_lake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateLake", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateTask(_BaseDataplexServiceRestTransport._BaseCreateTask, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CreateTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CreateTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create task method 
over HTTP. + + Args: + request (~.service.CreateTaskRequest): + The request object. Create task request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCreateTask._get_http_options() + + request, metadata = self._interceptor.pre_create_task(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateTask._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCreateTask._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCreateTask._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateTask", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CreateTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
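+ # The two steps below mirror every unary call in this transport: first + # surface HTTP errors, then decode the JSON body into the protobuf + # response with ``ignore_unknown_fields=True`` so that newer server + # fields do not break older clients.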
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_task_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_task", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateZone(_BaseDataplexServiceRestTransport._BaseCreateZone, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CreateZone") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.CreateZoneRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create zone method over HTTP. + + Args: + request (~.service.CreateZoneRequest): + The request object. Create zone request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseCreateZone._get_http_options() + + request, metadata = self._interceptor.pre_create_zone(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateZone._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCreateZone._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCreateZone._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateZone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateZone", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CreateZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_zone_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_zone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CreateZone", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAsset(_BaseDataplexServiceRestTransport._BaseDeleteAsset, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteAsset") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.DeleteAssetRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete asset method over HTTP. 
+ + Args: + request (~.service.DeleteAssetRequest): + The request object. Delete asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_http_options() + + request, metadata = self._interceptor.pre_delete_asset(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteAsset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteAsset", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
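+ # NOTE: illustrative commentary, not generated code. Although this RPC
+ # deletes a resource, the service replies with a google.longrunning
+ # Operation; the body parsed below only confirms that deletion started.
+ # The public client method typically wraps this in a polling future, so a
+ # caller would write (sketch, asset_name assumed):
+ #
+ #     lro = client.delete_asset(name=asset_name)
+ #     lro.result()  # blocks until the server-side delete completes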
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_asset_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteAsset", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEnvironment(_BaseDataplexServiceRestTransport._BaseDeleteEnvironment, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteEnvironment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.DeleteEnvironmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete environment method over HTTP. + + Args: + request (~.service.DeleteEnvironmentRequest): + The request object. Delete environment request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_http_options() + + request, metadata = self._interceptor.pre_delete_environment(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteEnvironment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteEnvironment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_environment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteEnvironment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteLake(_BaseDataplexServiceRestTransport._BaseDeleteLake, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteLake") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.DeleteLakeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete lake method over HTTP. 
+ + Args: + request (~.service.DeleteLakeRequest): + The request object. Delete lake request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_http_options() + + request, metadata = self._interceptor.pre_delete_lake(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteLake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteLake", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
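+ # NOTE: illustrative commentary, not generated code. The Parse call below
+ # passes ignore_unknown_fields=True so fields added to the API after this
+ # client was generated are silently dropped instead of raising ParseError,
+ # which keeps older clients forward compatible. A standalone example:
+ #
+ #     from google.longrunning import operations_pb2
+ #     from google.protobuf import json_format
+ #     op = operations_pb2.Operation()
+ #     json_format.Parse('{"name": "op-1", "someNewField": 1}', op,
+ #                       ignore_unknown_fields=True)
+ #     assert op.name == "op-1"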
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_lake(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_lake_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteLake", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteTask(_BaseDataplexServiceRestTransport._BaseDeleteTask, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.DeleteTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete task method over HTTP. + + Args: + request (~.service.DeleteTaskRequest): + The request object. Delete task request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_http_options() + + request, metadata = self._interceptor.pre_delete_task(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteTask", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_task_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_task", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteZone(_BaseDataplexServiceRestTransport._BaseDeleteZone, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteZone") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.DeleteZoneRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete zone method over HTTP. + + Args: + request (~.service.DeleteZoneRequest): + The request object. Delete zone request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_http_options() + + request, metadata = self._interceptor.pre_delete_zone(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteZone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteZone", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
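+ # NOTE: illustrative commentary, not generated code. The pre_*/post_* calls
+ # wrapped around this block are the supported customization hooks. A user
+ # can observe or rewrite traffic by subclassing the interceptor defined
+ # earlier in this module, for example (minimal sketch):
+ #
+ #     class AuditingInterceptor(DataplexServiceRestInterceptor):
+ #         def post_delete_zone(self, response):
+ #             _LOGGER.info("DeleteZone returned operation %s", response.name)
+ #             return response
+ #
+ #     transport = DataplexServiceRestTransport(interceptor=AuditingInterceptor())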
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_zone_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteZone", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAsset(_BaseDataplexServiceRestTransport._BaseGetAsset, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetAsset") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetAssetRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> resources.Asset: + r"""Call the get asset method over HTTP. + + Args: + request (~.service.GetAssetRequest): + The request object. Get asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.Asset: + An asset represents a cloud resource + that is being managed within a lake as a + member of a zone. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetAsset._get_http_options() + + request, metadata = self._interceptor.pre_get_asset(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetAsset._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetAsset._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetAsset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetAsset", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Asset() + pb_resp = resources.Asset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_asset_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = resources.Asset.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_asset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetAsset", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetEnvironment(_BaseDataplexServiceRestTransport._BaseGetEnvironment, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetEnvironment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetEnvironmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> analyze.Environment: + r"""Call the get environment method over HTTP. + + Args: + request (~.service.GetEnvironmentRequest): + The request object. 
Get environment request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.analyze.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_http_options() + + request, metadata = self._interceptor.pre_get_environment(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetEnvironment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetEnvironment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
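+ # NOTE: illustrative commentary, not generated code. These DEBUG blocks are
+ # skipped unless google-api-core advertises client logging support
+ # (CLIENT_LOGGING_SUPPORTED) and this module's logger is enabled, so request
+ # and response payloads are never logged by default. One way to switch the
+ # logging on from application code (logger name assumed from this module's
+ # package path):
+ #
+ #     import logging
+ #     logging.basicConfig(level=logging.DEBUG)
+ #     logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)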
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analyze.Environment() + pb_resp = analyze.Environment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = analyze.Environment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_environment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetEnvironment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetJob(_BaseDataplexServiceRestTransport._BaseGetJob, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> tasks.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.service.GetJobRequest): + The request object. Get job request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.tasks.Job: + A job represents an instance of a + task. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetJob._get_http_options() + + request, metadata = self._interceptor.pre_get_job(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetJob._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetJob._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetJob", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tasks.Job() + pb_resp = tasks.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = tasks.Job.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_job", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetLake(_BaseDataplexServiceRestTransport._BaseGetLake, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetLake") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetLakeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> resources.Lake: + r"""Call the get lake method over HTTP. + + Args: + request (~.service.GetLakeRequest): + The request object. Get lake request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.Lake: + A lake is a centralized repository + for managing enterprise data across the + organization distributed across many + cloud projects, and stored in a variety + of storage services such as Google Cloud + Storage and BigQuery. The resources + attached to a lake are referred to as + managed resources. Data within these + managed resources can be structured or + unstructured. A lake provides data + admins with tools to organize, secure + and manage their data at scale, and + provides data scientists and data + engineers an integrated experience to + easily search, discover, analyze and + transform data and associated metadata. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetLake._get_http_options() + + request, metadata = self._interceptor.pre_get_lake(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetLake._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetLake._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetLake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetLake", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
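+ # NOTE: illustrative commentary, not generated code. resources.Lake is a
+ # proto-plus wrapper, and resources.Lake.pb(resp) returns the protobuf
+ # message that backs resp, not a copy; parsing JSON into pb_resp below
+ # therefore populates resp as well. The aliasing is easy to demonstrate:
+ #
+ #     lake = resources.Lake()
+ #     resources.Lake.pb(lake).name = "projects/p/locations/l/lakes/demo"
+ #     assert lake.name == "projects/p/locations/l/lakes/demo"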
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Lake() + pb_resp = resources.Lake.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_lake(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_lake_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = resources.Lake.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_lake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetLake", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetTask(_BaseDataplexServiceRestTransport._BaseGetTask, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> tasks.Task: + r"""Call the get task method over HTTP. + + Args: + request (~.service.GetTaskRequest): + The request object. Get task request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.tasks.Task: + A task represents a user-visible job. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetTask._get_http_options() + + request, metadata = self._interceptor.pre_get_task(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetTask._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetTask._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetTask", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tasks.Task() + pb_resp = tasks.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_task_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = tasks.Task.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_task", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetZone(_BaseDataplexServiceRestTransport._BaseGetZone, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetZone") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.GetZoneRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> resources.Zone: + r"""Call the get zone method over HTTP. + + Args: + request (~.service.GetZoneRequest): + The request object. Get zone request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.Zone: + A zone represents a logical group of + related assets within a lake. A zone can + be used to map to organizational + structure or represent stages of data + readiness from raw to curated. It + provides managing behavior that is + shared or inherited by all contained + assets. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetZone._get_http_options() + + request, metadata = self._interceptor.pre_get_zone(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetZone._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetZone._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetZone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetZone", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
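+ # NOTE: illustrative commentary, not generated code. Before invoking the
+ # *_with_metadata hook below, every HTTP response header is stringified
+ # into a (key, value) tuple, e.g. [("content-type", "application/json;
+ # charset=UTF-8"), ...], mirroring the shape of gRPC trailing metadata so
+ # interceptors see a transport-independent format.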
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Zone() + pb_resp = resources.Zone.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_zone_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = resources.Zone.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_zone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetZone", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAssetActions(_BaseDataplexServiceRestTransport._BaseListAssetActions, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListAssetActions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListAssetActionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListActionsResponse: + r"""Call the list asset actions method over HTTP. + + Args: + request (~.service.ListAssetActionsRequest): + The request object. List asset actions request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListActionsResponse: + List actions response. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_http_options() + + request, metadata = self._interceptor.pre_list_asset_actions(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListAssetActions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListAssetActions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListAssetActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListActionsResponse() + pb_resp = service.ListActionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_asset_actions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_asset_actions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListActionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListAssetActions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAssets(_BaseDataplexServiceRestTransport._BaseListAssets, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListAssets") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListAssetsResponse: + r"""Call the list 
assets method over HTTP. + + Args: + request (~.service.ListAssetsRequest): + The request object. List assets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListAssetsResponse: + List assets response. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListAssets._get_http_options() + + request, metadata = self._interceptor.pre_list_assets(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListAssets", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListAssets", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
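+ # NOTE: illustrative commentary, not generated code. The ListAssetsResponse
+ # parsed below carries a next_page_token; the public client method hides
+ # that detail behind a pager that re-invokes this stub once per page, so
+ # typical application code is just (sketch, parent value assumed):
+ #
+ #     parent = "projects/p/locations/l/lakes/demo/zones/raw"
+ #     for asset in client.list_assets(parent=parent):
+ #         print(asset.name)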
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListAssetsResponse() + pb_resp = service.ListAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListAssetsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_assets", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEnvironments(_BaseDataplexServiceRestTransport._BaseListEnvironments, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListEnvironments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListEnvironmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListEnvironmentsResponse: + r"""Call the list environments method over HTTP. + + Args: + request (~.service.ListEnvironmentsRequest): + The request object. List environments request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListEnvironmentsResponse: + List environments response. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_http_options() + + request, metadata = self._interceptor.pre_list_environments(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListEnvironments", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListEnvironments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListEnvironments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListEnvironmentsResponse() + pb_resp = service.ListEnvironmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_environments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_environments_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListEnvironmentsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_environments", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListEnvironments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListJobs(_BaseDataplexServiceRestTransport._BaseListJobs, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListJobs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListJobsResponse: + r"""Call the list 
jobs method over HTTP. + + Args: + request (~.service.ListJobsRequest): + The request object. List jobs request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListJobsResponse: + List jobs response. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListJobs._get_http_options() + + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListJobs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListJobs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListJobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListJobs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
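Each `List*Response` parsed by these stubs carries a `next_page_token`. The GAPIC client layer normally hides that behind a pager, but the raw stub callables shown here can also be driven by hand. A sketch under that assumption, with a placeholder task name:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    transport = client.transport  # the DataplexServiceRestTransport built above
    request = dataplex_v1.ListJobsRequest(
        parent="projects/my-proj/locations/us-central1/lakes/my-lake/tasks/my-task",
        page_size=50,
    )
    while True:
        page = transport.list_jobs(request)  # invokes _ListJobs.__call__
        for job in page.jobs:
            print(job.name, job.state)
        if not page.next_page_token:
            break
        request.page_token = page.next_page_token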
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListJobsResponse() + pb_resp = service.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListJobsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListJobs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListLakeActions(_BaseDataplexServiceRestTransport._BaseListLakeActions, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListLakeActions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListLakeActionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListActionsResponse: + r"""Call the list lake actions method over HTTP. + + Args: + request (~.service.ListLakeActionsRequest): + The request object. List lake actions request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListActionsResponse: + List actions response. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_http_options() + + request, metadata = self._interceptor.pre_list_lake_actions(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLakeActions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLakeActions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListLakeActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListActionsResponse() + pb_resp = service.ListActionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_lake_actions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_lake_actions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListActionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLakeActions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListLakes(_BaseDataplexServiceRestTransport._BaseListLakes, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListLakes") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListLakesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListLakesResponse: + r"""Call the list lakes method over 
HTTP. + + Args: + request (~.service.ListLakesRequest): + The request object. List lakes request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListLakesResponse: + List lakes response. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListLakes._get_http_options() + + request, metadata = self._interceptor.pre_list_lakes(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListLakes._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListLakes._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLakes", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLakes", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListLakes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
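Every stub brackets its work with the paired `pre_`/`post_` interceptor hooks seen here, so a `DataplexServiceRestInterceptor` subclass can observe or rewrite individual RPCs. A sketch (the import path mirrors this module's location; the header name is made up for illustration):

    from google.cloud.dataplex_v1.services.dataplex_service.transports.rest import (
        DataplexServiceRestInterceptor,
    )

    class TaggingInterceptor(DataplexServiceRestInterceptor):
        def pre_list_lakes(self, request, metadata):
            # Runs before transcoding: may rewrite the request or extend metadata.
            return request, tuple(metadata) + (("x-example-tag", "lakes"),)

        def post_list_lakes(self, response):
            # Runs on the parsed ListLakesResponse before it is returned.
            return response

Such an instance would typically be handed to the transport via its `interceptor` constructor argument.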
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListLakesResponse() + pb_resp = service.ListLakesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_lakes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_lakes_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListLakesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLakes", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSessions(_BaseDataplexServiceRestTransport._BaseListSessions, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListSessions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListSessionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListSessionsResponse: + r"""Call the list sessions method over HTTP. + + Args: + request (~.service.ListSessionsRequest): + The request object. List sessions request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListSessionsResponse: + List sessions response. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseListSessions._get_http_options() + + request, metadata = self._interceptor.pre_list_sessions(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListSessions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListSessions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListSessions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListSessions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListSessionsResponse() + pb_resp = service.ListSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListSessionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListSessions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTasks(_BaseDataplexServiceRestTransport._BaseListTasks, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListTasks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListTasksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListTasksResponse: + r"""Call the list tasks method over HTTP. 
+ + Args: + request (~.service.ListTasksRequest): + The request object. List tasks request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListTasksResponse: + List tasks response. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListTasks._get_http_options() + + request, metadata = self._interceptor.pre_list_tasks(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListTasks._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListTasks._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListTasks", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListTasks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListTasks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
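The DEBUG-guarded blocks in these stubs emit structured request/response records through the module-level `_LOGGER`; they stay silent unless that logger is enabled. One way to surface them, a sketch in which the logger name is inferred from the package path (`google-api-core` is also understood to honor the `GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable for the same purpose):

    import logging

    logging.basicConfig()
    # Matches _LOGGER = logging.getLogger(__name__) inside the transport module.
    logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)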
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListTasksResponse() + pb_resp = service.ListTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_tasks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tasks_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListTasksResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListTasks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListZoneActions(_BaseDataplexServiceRestTransport._BaseListZoneActions, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListZoneActions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListZoneActionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListActionsResponse: + r"""Call the list zone actions method over HTTP. + + Args: + request (~.service.ListZoneActionsRequest): + The request object. List zone actions request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListActionsResponse: + List actions response. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_http_options() + + request, metadata = self._interceptor.pre_list_zone_actions(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListZoneActions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListZoneActions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListZoneActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListActionsResponse() + pb_resp = service.ListActionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_zone_actions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_zone_actions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListActionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListZoneActions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListZones(_BaseDataplexServiceRestTransport._BaseListZones, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListZones") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: service.ListZonesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.ListZonesResponse: + r"""Call the list zones method over 
HTTP. + + Args: + request (~.service.ListZonesRequest): + The request object. List zones request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.service.ListZonesResponse: + List zones response. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListZones._get_http_options() + + request, metadata = self._interceptor.pre_list_zones(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListZones._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListZones._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListZones", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListZones", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListZones._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
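In the `_get_response` helpers above, `rest_helpers.flatten_query_params(..., strict=True)` turns the nested `query_params` mapping into repeatable key/value pairs: sub-objects flatten to dotted keys and, with `strict=True`, primitive values are stringified. A sketch of the expected shape:

    from google.api_core import rest_helpers

    pairs = rest_helpers.flatten_query_params(
        {"pageSize": 10, "labels": {"env": "dev"}}, strict=True
    )
    # Expected: [("pageSize", "10"), ("labels.env", "dev")]
    print(pairs)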
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListZonesResponse() + pb_resp = service.ListZonesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_zones(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_zones_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.ListZonesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_zones", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListZones", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _RunTask(_BaseDataplexServiceRestTransport._BaseRunTask, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.RunTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.RunTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> service.RunTaskResponse: + r"""Call the run task method over HTTP. + + Args: + request (~.service.RunTaskRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.service.RunTaskResponse: + Run task response. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseRunTask._get_http_options() + + request, metadata = self._interceptor.pre_run_task(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseRunTask._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseRunTask._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseRunTask._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except Exception: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + "Sending request for google.cloud.dataplex_v1.DataplexServiceClient.RunTask", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "RunTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._RunTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.RunTaskResponse() + pb_resp = service.RunTaskResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_run_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_task_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = service.RunTaskResponse.to_json(resp) + except Exception: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.run_task", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "RunTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateAsset(_BaseDataplexServiceRestTransport._BaseUpdateAsset, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.UpdateAsset") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.UpdateAssetRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + )
-> operations_pb2.Operation: + r"""Call the update asset method over HTTP. + + Args: + request (~.service.UpdateAssetRequest): + The request object. Update asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_http_options() + + request, metadata = self._interceptor.pre_update_asset(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateAsset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateAsset", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._UpdateAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
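Unlike the `List*` calls, `UpdateAsset` resolves to a long-running `operations_pb2.Operation`. The transport returns it raw; at the GAPIC client layer it arrives wrapped in a pollable future. A sketch with placeholder resource names:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    operation = client.update_asset(
        asset=dataplex_v1.Asset(
            name="projects/my-proj/locations/us-central1/lakes/my-lake"
                 "/zones/my-zone/assets/my-asset",
            description="refreshed",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    asset = operation.result(timeout=300)  # blocks until the LRO completes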
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_asset_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_asset", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateAsset", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateEnvironment(_BaseDataplexServiceRestTransport._BaseUpdateEnvironment, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.UpdateEnvironment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.UpdateEnvironmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update environment method over HTTP. + + Args: + request (~.service.UpdateEnvironmentRequest): + The request object. Update environment request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_http_options() + + request, metadata = self._interceptor.pre_update_environment(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateEnvironment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateEnvironment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._UpdateEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_environment_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_environment", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateEnvironment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateLake(_BaseDataplexServiceRestTransport._BaseUpdateLake, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.UpdateLake") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.UpdateLakeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update lake method over HTTP. + + Args: + request (~.service.UpdateLakeRequest): + The request object. Update lake request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_http_options() + + request, metadata = self._interceptor.pre_update_lake(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateLake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateLake", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._UpdateLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
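As with the other update RPCs, `_get_request_body_json` serializes the resource into the HTTP PATCH payload, while `update_mask` tells the service which fields to touch. Building such a request, as a sketch with a placeholder lake name:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    request = dataplex_v1.UpdateLakeRequest(
        lake=dataplex_v1.Lake(
            name="projects/my-proj/locations/us-central1/lakes/my-lake",
            description="new description",
        ),
        # Only the masked fields are patched; everything else is left untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )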
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_lake(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_lake_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_lake", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateLake", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateTask(_BaseDataplexServiceRestTransport._BaseUpdateTask, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.UpdateTask") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.UpdateTaskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update task method over HTTP. + + Args: + request (~.service.UpdateTaskRequest): + The request object. Update task request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_http_options() + + request, metadata = self._interceptor.pre_update_task(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateTask", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateTask", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._UpdateTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_task_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_task", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateTask", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateZone(_BaseDataplexServiceRestTransport._BaseUpdateZone, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.UpdateZone") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: service.UpdateZoneRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update zone method 
over HTTP. + + Args: + request (~.service.UpdateZoneRequest): + The request object. Update zone request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_http_options() + + request, metadata = self._interceptor.pre_update_zone(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateZone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateZone", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._UpdateZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_zone_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_zone", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "UpdateZone", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def cancel_job(self) -> Callable[ + [service.CancelJobRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_asset(self) -> Callable[ + [service.CreateAssetRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAsset(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_environment(self) -> Callable[ + [service.CreateEnvironmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_lake(self) -> Callable[ + [service.CreateLakeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateLake(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_task(self) -> Callable[ + [service.CreateTaskRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_zone(self) -> Callable[ + [service.CreateZoneRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_asset(self) -> Callable[ + [service.DeleteAssetRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAsset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_environment(self) -> Callable[ + [service.DeleteEnvironmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_lake(self) -> Callable[ + [service.DeleteLakeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteLake(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_task(self) -> Callable[ + [service.DeleteTaskRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_zone(self) -> Callable[ + [service.DeleteZoneRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_asset(self) -> Callable[ + [service.GetAssetRequest], + resources.Asset]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAsset(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_environment(self) -> Callable[ + [service.GetEnvironmentRequest], + analyze.Environment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[ + [service.GetJobRequest], + tasks.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_lake(self) -> Callable[ + [service.GetLakeRequest], + resources.Lake]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLake(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_task(self) -> Callable[ + [service.GetTaskRequest], + tasks.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_zone(self) -> Callable[ + [service.GetZoneRequest], + resources.Zone]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_asset_actions(self) -> Callable[ + [service.ListAssetActionsRequest], + service.ListActionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAssetActions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_assets(self) -> Callable[ + [service.ListAssetsRequest], + service.ListAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_environments(self) -> Callable[ + [service.ListEnvironmentsRequest], + service.ListEnvironmentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEnvironments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[ + [service.ListJobsRequest], + service.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_lake_actions(self) -> Callable[ + [service.ListLakeActionsRequest], + service.ListActionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLakeActions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_lakes(self) -> Callable[ + [service.ListLakesRequest], + service.ListLakesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLakes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sessions(self) -> Callable[ + [service.ListSessionsRequest], + service.ListSessionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tasks(self) -> Callable[ + [service.ListTasksRequest], + service.ListTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_zone_actions(self) -> Callable[ + [service.ListZoneActionsRequest], + service.ListActionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListZoneActions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_zones(self) -> Callable[ + [service.ListZonesRequest], + service.ListZonesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListZones(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_task(self) -> Callable[ + [service.RunTaskRequest], + service.RunTaskResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_asset(self) -> Callable[ + [service.UpdateAssetRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAsset(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_environment(self) -> Callable[ + [service.UpdateEnvironmentRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_lake(self) -> Callable[ + [service.UpdateLakeRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateLake(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_task(self) -> Callable[ + [service.UpdateTaskRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_zone(self) -> Callable[ + [service.UpdateZoneRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateZone(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseDataplexServiceRestTransport._BaseGetLocation, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseDataplexServiceRestTransport._BaseListLocations, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
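+            # Editor's note (illustrative sketch, not part of the generated code): the
+            # status check below maps any non-2xx response onto the matching
+            # GoogleAPICallError subclass via core_exceptions.from_http_response
+            # (HTTP 404 -> NotFound, HTTP 403 -> PermissionDenied, and so on). A
+            # hypothetical caller of this mixin can therefore catch specific error
+            # types instead of inspecting raw status codes:
+            #
+            #     from google.api_core import exceptions as core_exceptions
+            #
+            #     try:
+            #         page = client.list_locations(request=request)
+            #     except core_exceptions.PermissionDenied:
+            #         ...  # mapped from HTTP 403; check IAM on the project
+            #     except core_exceptions.NotFound:
+            #         ...  # mapped from HTTP 404; the resource name is wrong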
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseDataplexServiceRestTransport._BaseCancelOperation, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseDataplexServiceRestTransport._BaseDeleteOperation, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseDataplexServiceRestTransport._BaseGetOperation, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseDataplexServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseDataplexServiceRestTransport._BaseListOperations, DataplexServiceRestStub): + def __hash__(self): + return hash("DataplexServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseDataplexServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDataplexServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDataplexServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataplexServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
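+            # Editor's sketch (illustrative only, not generated code): ListOperations
+            # is a paginated RPC and this transport method returns a single page.
+            # Assuming a hypothetical `transport` instance, a manual paging loop
+            # would advance with `next_page_token`:
+            #
+            #     request = operations_pb2.ListOperationsRequest(
+            #         name="projects/my-project/locations/us-central1",  # hypothetical name
+            #     )
+            #     while True:
+            #         page = transport.list_operations(request)
+            #         for operation in page.operations:
+            #             print(operation.name, operation.done)
+            #         if not page.next_page_token:
+            #             break
+            #         request.page_token = page.next_page_token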
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.DataplexService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DataplexServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py new file mode 100644 index 000000000000..e0aa68e5b753 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py @@ -0,0 +1,1612 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import service +from google.cloud.dataplex_v1.types import tasks +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDataplexServiceRestTransport(DataplexServiceTransport): + """Base REST backend transport for DataplexService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCancelJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CancelJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCancelJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateAsset: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "assetId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k
not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets', + 'body': 'asset', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateAssetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCreateAsset._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateEnvironment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "environmentId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/environments', + 'body': 'environment', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateLake: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "lakeId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/lakes', + 'body': 'lake', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateLakeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + 
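+        # Editor's sketch (illustrative only, not generated code): together these
+        # helpers split one request across the HTTP rule declared above. Assuming a
+        # hypothetical CreateLakeRequest, transcoding behaves roughly like this:
+        #
+        #     request = service.CreateLakeRequest(
+        #         parent="projects/my-project/locations/us-central1",  # hypothetical
+        #         lake_id="my-lake",
+        #         lake=resources.Lake(display_name="My Lake"),
+        #     )
+        #     opts = _BaseDataplexServiceRestTransport._BaseCreateLake._get_http_options()
+        #     t = _BaseDataplexServiceRestTransport._BaseCreateLake._get_transcoded_request(opts, request)
+        #     assert t['method'] == 'post'
+        #     assert t['uri'] == '/v1/projects/my-project/locations/us-central1/lakes'
+        #     # t['body'] carries the `lake` message; `lake_id` stays in
+        #     # t['query_params'], and _get_unset_required_fields() back-fills the
+        #     # required "lakeId" key with "" when a caller leaves it unset.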
@staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCreateLake._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "taskId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/tasks', + 'body': 'task', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCreateTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateZone: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "zoneId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/zones', + 'body': 'zone', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseCreateZone._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteAsset: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteAssetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteAsset._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEnvironment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/environments/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteLake: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteLakeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteLake._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteZone: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteZone._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAsset: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetAssetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetAsset._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEnvironment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + 
def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/environments/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetEnvironment._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetJob._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLake: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetLakeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetLake._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetZone: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseGetZone._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAssetActions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/assets/*}/actions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListAssetActionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListAssetActions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAssets: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 
'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEnvironments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/environments', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListEnvironmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListEnvironments._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListJobs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/tasks/*}/jobs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListJobs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListLakeActions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', 
+ 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/actions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListLakeActionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListLakeActions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListLakes: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/lakes', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListLakesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListLakes._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSessions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/environments/*}/sessions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListSessionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListSessions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTasks: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': 
'/v1/{parent=projects/*/locations/*/lakes/*}/tasks', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListTasks._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListZoneActions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/actions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListZoneActionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListZoneActions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListZones: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/zones', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListZonesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseListZones._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRunTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{name=projects/*/locations/*/lakes/*/tasks/*}:run', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.RunTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseRunTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateAsset: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{asset.name=projects/*/locations/*/lakes/*/zones/*/assets/*}', + 'body': 'asset', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpdateAssetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateAsset._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEnvironment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{environment.name=projects/*/locations/*/lakes/*/environments/*}', + 'body': 'environment', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpdateEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( 
+ transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateLake: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{lake.name=projects/*/locations/*/lakes/*}', + 'body': 'lake', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpdateLakeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateLake._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateTask: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{task.name=projects/*/locations/*/lakes/*/tasks/*}', + 'body': 'task', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpdateTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateTask._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateZone: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{zone.name=projects/*/locations/*/lakes/*/zones/*}', + 'body': 'zone', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpdateZoneRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateZone._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
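+        # Like the other operations-mixin bases below, this class publishes two
+        # URI bindings (one under projects/* and one under organizations/*);
+        # path_template.transcode() selects the first pattern that matches the
+        # request's resource name.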
@staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseDataplexServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py new file mode 100644 index 000000000000..6d73da7177b0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
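+#
+# A minimal usage sketch for the clients re-exported below (the resource name
+# is a hypothetical example):
+#
+#   from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient
+#
+#   client = MetadataServiceClient()
+#   entity = client.get_entity(
+#       name="projects/123/locations/us-central1/lakes/my-lake/zones/my-zone/entities/my-entity",
+#   )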
+# +from .client import MetadataServiceClient +from .async_client import MetadataServiceAsyncClient + +__all__ = ( + 'MetadataServiceClient', + 'MetadataServiceAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py new file mode 100644 index 000000000000..a764830d7bc8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -0,0 +1,1571 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport +from .client import MetadataServiceClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class MetadataServiceAsyncClient: + """Metadata service manages metadata resources such as tables, + filesets and partitions. + """ + + _client: MetadataServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+    DEFAULT_ENDPOINT = MetadataServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = MetadataServiceClient._DEFAULT_UNIVERSE
+
+    entity_path = staticmethod(MetadataServiceClient.entity_path)
+    parse_entity_path = staticmethod(MetadataServiceClient.parse_entity_path)
+    partition_path = staticmethod(MetadataServiceClient.partition_path)
+    parse_partition_path = staticmethod(MetadataServiceClient.parse_partition_path)
+    zone_path = staticmethod(MetadataServiceClient.zone_path)
+    parse_zone_path = staticmethod(MetadataServiceClient.parse_zone_path)
+    common_billing_account_path = staticmethod(MetadataServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(MetadataServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(MetadataServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(MetadataServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(MetadataServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(MetadataServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(MetadataServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(MetadataServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(MetadataServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(MetadataServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetadataServiceAsyncClient: The constructed client.
+        """
+        return MetadataServiceClient.from_service_account_info.__func__(MetadataServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetadataServiceAsyncClient: The constructed client.
+        """
+        return MetadataServiceClient.from_service_account_file.__func__(MetadataServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetadataServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+    @property
+    def transport(self) -> MetadataServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetadataServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = MetadataServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the metadata service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the MetadataServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MetadataServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.MetadataServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "credentialsType": None, + } + ) + + async def create_entity(self, + request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity: Optional[metadata_.Entity] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = await client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]]): + The request object. Create a metadata entity request. 
+ parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (:class:`google.cloud.dataplex_v1.types.Entity`): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entity] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.CreateEntityRequest): + request = metadata_.CreateEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity is not None: + request.entity = entity + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entity(self, + request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Entity: + r"""Update a metadata entity. Only supports full resource + update. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            async def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceAsyncClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = await client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully
+                replaced by the entity in the request.
+                The entity ID is mutable. To modify the
+                ID, use the current entity ID in the
+                request URL and specify the new ID in
+                the request body.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.UpdateEntityRequest):
+            request = metadata_.UpdateEntityRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("entity.name", request.entity.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_entity(self,
+            request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> None:
+        r"""Delete a metadata entity.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + await client.delete_entity(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]]): + The request object. Delete a metadata entity request. + name (:class:`str`): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeleteEntityRequest): + request = metadata_.DeleteEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_entity(self, + request: Optional[Union[metadata_.GetEntityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Entity: + r"""Get a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]]): + The request object. Get metadata entity request. + name (:class:`str`): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}.`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetEntityRequest): + request = metadata_.GetEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_entities(self, + request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntitiesAsyncPager: + r"""List metadata entities in a zone. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]]): + The request object. List metadata entities request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager: + List metadata entities response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListEntitiesRequest): + request = metadata_.ListEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_entities] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
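+        # The pager is handed the same retry, timeout, and metadata values as
+        # the first call, so any follow-up page requests it issues are sent
+        # with identical settings.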
+ response = pagers.ListEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_partition(self, + request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, + *, + parent: Optional[str] = None, + partition: Optional[metadata_.Partition] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Partition: + r"""Create a metadata partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = await client.create_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]]): + The request object. Create metadata partition request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partition (:class:`google.cloud.dataplex_v1.types.Partition`): + Required. Partition resource. + This corresponds to the ``partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, partition] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, metadata_.CreatePartitionRequest): + request = metadata_.CreatePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partition is not None: + request.partition = partition + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_partition(self, + request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a metadata partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_partition(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]]): + The request object. Delete metadata partition request. + name (:class:`str`): + Required. The resource name of the partition. format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
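This quick check is the same guard used by every method with flattened fields: `request` and the per-field keyword arguments are mutually exclusive. A sketch of the two accepted call styles on the async client (the partition name is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def show_call_styles(client: dataplex_v1.MetadataServiceAsyncClient) -> None:
        # Placeholder resource name.
        name = (
            "projects/my-project/locations/us-central1/lakes/my-lake"
            "/zones/my-zone/entities/my-entity/partitions/20240101"
        )

        # Style 1: pass a fully built request object.
        request = dataplex_v1.GetPartitionRequest(name=name)
        partition = await client.get_partition(request=request)

        # Style 2: pass flattened fields and let the client build the request.
        partition = await client.get_partition(name=name)

        # Mixing both styles is rejected client-side, before any RPC:
        # await client.get_partition(request=request, name=name)  # ValueError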
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_partition(self, + request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Partition: + r"""Get a metadata partition of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]]): + The request object. Get metadata partition request. + name (:class:`str`): + Required. The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetPartitionRequest): + request = metadata_.GetPartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_partitions(self, + request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartitionsAsyncPager: + r"""List metadata partitions of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]]): + The request object. List metadata partitions request. + parent (:class:`str`): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager: + List metadata partitions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListPartitionsRequest): + request = metadata_.ListPartitionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPartitionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
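Because `operations_pb2` requests are raw protobuf messages rather than proto-plus wrappers, a dict argument is expanded into constructor keywords, as the comment above notes. A small sketch (the location name is a placeholder):

.. code-block:: python

    from google.longrunning import operations_pb2

    params = {
        "name": "projects/my-project/locations/us-central1",  # placeholder
        "filter": "done:false",
        "page_size": 100,
    }

    # Keyword expansion: keys must match the proto field names exactly;
    # an unknown key raises ValueError at construction time.
    request = operations_pb2.ListOperationsRequest(**params)
    assert request.page_size == 100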
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "MetadataServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "MetadataServiceAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py new file mode 100644 index 000000000000..0acd72595f44 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -0,0 +1,1953 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.dataplex_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import MetadataServiceGrpcTransport +from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport +from .transports.rest import MetadataServiceRestTransport + + 
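For reference, the `__aenter__`/`__aexit__` pair defined just before this file boundary makes the async client usable as an async context manager, guaranteeing that the transport (and its underlying channel) is closed on exit. A minimal sketch with a placeholder entity name:

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        # __aexit__ awaits transport.close() even if the body raises.
        async with dataplex_v1.MetadataServiceAsyncClient() as client:
            entity = await client.get_entity(
                name=(
                    "projects/my-project/locations/us-central1"
                    "/lakes/my-lake/zones/my-zone/entities/my-entity"  # placeholder
                )
            )
            print(entity.name)

    asyncio.run(main())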
+class MetadataServiceClientMeta(type): + """Metaclass for the MetadataService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] + _transport_registry["grpc"] = MetadataServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MetadataServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MetadataServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MetadataServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetadataServiceClient(metaclass=MetadataServiceClientMeta): + """Metadata service manages metadata resources such as tables, + filesets and partitions. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetadataServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetadataServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file
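These factory classmethods only build `service_account.Credentials` and inject them into the regular constructor, so all other constructor arguments still pass through. A sketch (the key file path is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1

    # from_service_account_json is an alias of from_service_account_file.
    client = dataplex_v1.MetadataServiceClient.from_service_account_file(
        "service-account.json",  # placeholder path
        transport="rest",        # remaining kwargs are forwarded to __init__
    )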
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetadataServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MetadataServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str: + """Returns a fully-qualified entity string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) + + @staticmethod + def parse_entity_path(path: str) -> Dict[str,str]: + """Parses a entity path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def partition_path(project: str,location: str,lake: str,zone: str,entity: str,partition: str,) -> str: + """Returns a fully-qualified partition string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) + + @staticmethod + def parse_partition_path(path: str) -> Dict[str,str]: + """Parses a partition path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)/partitions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def zone_path(project: str,location: str,lake: str,zone: str,) -> str: + """Returns a fully-qualified zone string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + + @staticmethod + def parse_zone_path(path: str) -> Dict[str,str]: + """Parses a zone path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return 
+ + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = MetadataServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. 
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MetadataServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metadata service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetadataServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetadataServiceClient._read_environment_variables() + self._client_cert_source = MetadataServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = MetadataServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MetadataServiceTransport) + if transport_provided: + # transport is a MetadataServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MetadataServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + MetadataServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport]] = ( + MetadataServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetadataServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.MetadataServiceClient`.", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "credentialsType": None, + } + ) + + def create_entity(self, + request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity: Optional[metadata_.Entity] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): + The request object. Create a metadata entity request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entity] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.CreateEntityRequest): + request = metadata_.CreateEntityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity is not None: + request.entity = entity + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def update_entity(self,
+            request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> metadata_.Entity:
+        r"""Update a metadata entity. Only supports full resource
+        update.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully
+                replaced by the entity in the request.
+                The entity ID is mutable. To modify the
+                ID, use the current entity ID in the
+                request URL and specify the new ID in
+                the request body.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.UpdateEntityRequest):
+            request = metadata_.UpdateEntityRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("entity.name", request.entity.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + def delete_entity(self, + request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete a metadata entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]): + The request object. Delete a metadata entity request. + name (str): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeleteEntityRequest): + request = metadata_.DeleteEntityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
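+        # ``DeleteEntity`` has no meaningful payload (the RPC returns
+        # ``Empty``), so the result is intentionally discarded.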
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def get_entity(self,
+            request: Optional[Union[metadata_.GetEntityRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> metadata_.Entity:
+        r"""Get a metadata entity.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_get_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.GetEntityRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]):
+                The request object. Get metadata entity request.
+            name (str):
+                Required. The resource name of the entity:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.GetEntityRequest):
+            request = metadata_.GetEntityRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entities(self, + request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListEntitiesPager: + r"""List metadata entities in a zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]): + The request object. List metadata entities request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager: + List metadata entities response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListEntitiesRequest): + request = metadata_.ListEntitiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
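+        # ``_wrapped_methods`` is populated by the transport's
+        # ``_prep_wrapped_messages`` and carries each RPC's default retry
+        # and timeout configuration.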
+        rpc = self._transport._wrapped_methods[self._transport.list_entities]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListEntitiesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_partition(self,
+            request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            partition: Optional[metadata_.Partition] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> metadata_.Partition:
+        r"""Create a metadata partition.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_create_partition():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                partition = dataplex_v1.Partition()
+                partition.values = ['values_value1', 'values_value2']
+                partition.location = "location_value"
+
+                request = dataplex_v1.CreatePartitionRequest(
+                    parent="parent_value",
+                    partition=partition,
+                )
+
+                # Make the request
+                response = client.create_partition(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]):
+                The request object. Create metadata partition request.
+            parent (str):
+                Required. The resource name of the parent entity:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            partition (google.cloud.dataplex_v1.types.Partition):
+                Required. Partition resource.
+                This corresponds to the ``partition`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Partition:
+                Represents partition metadata
+                contained within entity instances.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, partition]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, metadata_.CreatePartitionRequest):
+            request = metadata_.CreatePartitionRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if partition is not None:
+                request.partition = partition
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_partition]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_partition(self,
+            request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> None:
+        r"""Delete a metadata partition.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataplex_v1
+
+            def sample_delete_partition():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeletePartitionRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_partition(request=request)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]):
+                The request object. Delete metadata partition request.
+            name (str):
+                Required. The resource name of the partition. Format:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``.
+                The {partition_value_path} segment consists of an
+                ordered sequence of partition values separated by "/".
+                All values must be provided.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata.
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_partition(self, + request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> metadata_.Partition: + r"""Get a metadata partition of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]): + The request object. Get metadata partition request. + name (str): + Required. The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.GetPartitionRequest): + request = metadata_.GetPartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_partitions(self, + request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartitionsPager: + r"""List metadata partitions of an entity. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]): + The request object. List metadata partitions request. + parent (str): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager: + List metadata partitions response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, metadata_.ListPartitionsRequest): + request = metadata_.ListPartitionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPartitionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MetadataServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+    def get_location(
+        self,
+        request: Optional[locations_pb2.GetLocationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> locations_pb2.Location:
+        r"""Gets information about a location.
+
+        Args:
+            request (:class:`~.locations_pb2.GetLocationRequest`):
+                The request object. Request message for
+                `GetLocation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.locations_pb2.Location:
+                Location object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = locations_pb2.GetLocationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_location]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request, retry=retry, timeout=timeout, metadata=metadata,)
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def list_locations(
+        self,
+        request: Optional[locations_pb2.ListLocationsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> locations_pb2.ListLocationsResponse:
+        r"""Lists information about the supported locations for this service.
+
+        Args:
+            request (:class:`~.locations_pb2.ListLocationsRequest`):
+                The request object. Request message for
+                `ListLocations` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.locations_pb2.ListLocationsResponse:
+                Response message for ``ListLocations`` method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
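+        # (A request that is already a ``ListLocationsRequest`` instance
+        # is used as-is; only a ``dict`` needs expanding here.)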
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "MetadataServiceClient", +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py new file mode 100644 index 000000000000..33f41499e016 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import metadata_ + + +class ListEntitiesPager: + """A pager for iterating through ``list_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntities`` requests and continue to iterate + through the ``entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
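+
+    A minimal usage sketch (illustrative only; the request values are
+    placeholders borrowed from the generated samples above):
+
+    .. code-block:: python
+
+        from google.cloud import dataplex_v1
+
+        client = dataplex_v1.MetadataServiceClient()
+        request = dataplex_v1.ListEntitiesRequest(
+            parent="parent_value",
+            view="FILESETS",
+        )
+
+        # Iterating the pager yields entities across all pages; follow-up
+        # pages are fetched lazily as iteration proceeds.
+        for entity in client.list_entities(request=request):
+            print(entity)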
+ """ + def __init__(self, + method: Callable[..., metadata_.ListEntitiesResponse], + request: metadata_.ListEntitiesRequest, + response: metadata_.ListEntitiesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntitiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntitiesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = metadata_.ListEntitiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metadata_.ListEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metadata_.Entity]: + for page in self.pages: + yield from page.entities + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntitiesAsyncPager: + """A pager for iterating through ``list_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entities`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntities`` requests and continue to iterate + through the ``entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metadata_.ListEntitiesResponse]], + request: metadata_.ListEntitiesRequest, + response: metadata_.ListEntitiesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEntitiesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEntitiesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = metadata_.ListEntitiesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metadata_.ListEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[metadata_.Entity]: + async def async_generator(): + async for page in self.pages: + for response in page.entities: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPartitionsPager: + """A pager for iterating through ``list_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``partitions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPartitions`` requests and continue to iterate + through the ``partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metadata_.ListPartitionsResponse], + request: metadata_.ListPartitionsRequest, + response: metadata_.ListPartitionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListPartitionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListPartitionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = metadata_.ListPartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metadata_.ListPartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metadata_.Partition]: + for page in self.pages: + yield from page.partitions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPartitionsAsyncPager: + """A pager for iterating through ``list_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``partitions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPartitions`` requests and continue to iterate + through the ``partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metadata_.ListPartitionsResponse]], + request: metadata_.ListPartitionsRequest, + response: metadata_.ListPartitionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListPartitionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListPartitionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = metadata_.ListPartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metadata_.ListPartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[metadata_.Partition]: + async def async_generator(): + async for page in self.pages: + for response in page.partitions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst new file mode 100644 index 000000000000..ff25cadba5cb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MetadataServiceTransport` is the ABC for all transports. +- public child `MetadataServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MetadataServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMetadataServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MetadataServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py new file mode 100644 index 000000000000..373a4faff810 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetadataServiceTransport +from .grpc import MetadataServiceGrpcTransport +from .grpc_asyncio import MetadataServiceGrpcAsyncIOTransport +from .rest import MetadataServiceRestTransport +from .rest import MetadataServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] +_transport_registry['grpc'] = MetadataServiceGrpcTransport +_transport_registry['grpc_asyncio'] = MetadataServiceGrpcAsyncIOTransport +_transport_registry['rest'] = MetadataServiceRestTransport + +__all__ = ( + 'MetadataServiceTransport', + 'MetadataServiceGrpcTransport', + 'MetadataServiceGrpcAsyncIOTransport', + 'MetadataServiceRestTransport', + 'MetadataServiceRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py new file mode 100644 index 000000000000..b184c3faf9f1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataplex_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MetadataServiceTransport(abc.ABC): + """Abstract transport class for MetadataService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'dataplex.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if a credentials file was passed by the user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods.
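Before the method table below, it is worth restating the credential-resolution order the constructor above implements: explicit `credentials` wins, then `credentials_file`, then Application Default Credentials, with self-signed JWTs enabled for service accounts. A condensed sketch using only public `google.auth` calls; the function name and defaults here are illustrative, not part of the generated surface.

```python
from typing import Optional, Sequence

import google.auth
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.oauth2 import service_account

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

def resolve_credentials(
    credentials: Optional[ga_credentials.Credentials] = None,
    credentials_file: Optional[str] = None,
    scopes: Optional[Sequence[str]] = None,
    quota_project_id: Optional[str] = None,
    always_use_jwt_access: bool = True,
) -> ga_credentials.Credentials:
    # Explicit credentials and a credentials file cannot both be given.
    if credentials and credentials_file:
        raise core_exceptions.DuplicateCredentialArgs(
            "'credentials_file' and 'credentials' are mutually exclusive")
    if credentials_file is not None:
        # Load from an on-disk key file, applying the service scopes.
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file,
            scopes=scopes,
            default_scopes=AUTH_SCOPES,
            quota_project_id=quota_project_id,
        )
    elif credentials is None:
        # Fall back to Application Default Credentials.
        credentials, _ = google.auth.default(
            scopes=scopes,
            default_scopes=AUTH_SCOPES,
            quota_project_id=quota_project_id,
        )
    # Service accounts can skip the token endpoint via self-signed JWTs.
    if always_use_jwt_access and isinstance(credentials, service_account.Credentials):
        credentials = credentials.with_always_use_jwt_access(True)
    return credentials
```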
+ self._wrapped_methods = { + self.create_entity: gapic_v1.method.wrap_method( + self.create_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entity: gapic_v1.method.wrap_method( + self.update_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entity: gapic_v1.method.wrap_method( + self.delete_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.get_entity: gapic_v1.method.wrap_method( + self.get_entity, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_entities: gapic_v1.method.wrap_method( + self.list_entities, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_partition: gapic_v1.method.wrap_method( + self.create_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_partition: gapic_v1.method.wrap_method( + self.delete_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.get_partition: gapic_v1.method.wrap_method( + self.get_partition, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_partitions: gapic_v1.method.wrap_method( + self.list_partitions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
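The table above gives the read-style RPCs (`get_entity`, `list_entities`, `get_partition`, `list_partitions`) an exponential-backoff retry on `UNAVAILABLE` with a 60-second overall budget, while the mutating RPCs get only the 60-second timeout. The same policy can be built and applied standalone; the decorated function below is a stand-in, not a real RPC.

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

# Equivalent policy: sleep 1.0s, growing 1.3x per attempt (capped at
# 10s per sleep), retrying only ServiceUnavailable, for at most 60s
# overall (``deadline`` is the legacy name for ``timeout``).
read_retry = retries.Retry(
    initial=1.0,
    maximum=10.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=60.0,
)

@read_retry
def flaky_read() -> str:
    """Stand-in for a read RPC that may raise ServiceUnavailable."""
    return "ok"

print(flaky_read())  # retries transparently on ServiceUnavailable
```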
+ """ + raise NotImplementedError() + + @property + def create_entity(self) -> Callable[ + [metadata_.CreateEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + Union[ + metadata_.Entity, + Awaitable[metadata_.Entity] + ]]: + raise NotImplementedError() + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + Union[ + metadata_.ListEntitiesResponse, + Awaitable[metadata_.ListEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + Union[ + metadata_.Partition, + Awaitable[metadata_.Partition] + ]]: + raise NotImplementedError() + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + Union[ + metadata_.Partition, + Awaitable[metadata_.Partition] + ]]: + raise NotImplementedError() + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + Union[ + metadata_.ListPartitionsResponse, + Awaitable[metadata_.ListPartitionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'MetadataServiceTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py new file mode 100644 index 000000000000..f1c6337633c9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py @@ -0,0 +1,669 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert the gRPC trailing metadata to a serializable dict for logging + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata,
+ "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MetadataServiceGrpcTransport(MetadataServiceTransport): + """gRPC backend transport for MetadataService. + + Metadata service manages metadata resources such as tables, + filesets and partitions. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_entity(self) -> Callable[ + [metadata_.CreateEntityRequest], + metadata_.Entity]: + r"""Return a callable for the create entity method over gRPC. + + Create a metadata entity. + + Returns: + Callable[[~.CreateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entity' not in self._stubs: + self._stubs['create_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreateEntity', + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['create_entity'] + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + metadata_.Entity]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
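Because `create_channel` above is a classmethod and the constructor accepts a ready-made channel, a caller can tune channel options up front and hand the result in; the transport then ignores credential kwargs by design. A sketch with an arbitrary message-size limit (Application Default Credentials are still resolved inside `create_channel`):

```python
from google.cloud.dataplex_v1.services.metadata_service.transports import (
    MetadataServiceGrpcTransport,
)

# Build the channel explicitly, e.g. to raise the receive-size limit.
channel = MetadataServiceGrpcTransport.create_channel(
    "dataplex.googleapis.com",
    options=[("grpc.max_receive_message_length", 64 * 1024 * 1024)],
)

# Passing a channel instance makes the transport skip credential setup.
transport = MetadataServiceGrpcTransport(channel=channel)
```

The resulting transport can then be handed to `MetadataServiceClient(transport=transport)`.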
+ if 'update_entity' not in self._stubs: + self._stubs['update_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['update_entity'] + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entity' not in self._stubs: + self._stubs['delete_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entity'] + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + metadata_.Entity]: + r"""Return a callable for the get entity method over gRPC. + + Get a metadata entity. + + Returns: + Callable[[~.GetEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entity' not in self._stubs: + self._stubs['get_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetEntity', + request_serializer=metadata_.GetEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['get_entity'] + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + metadata_.ListEntitiesResponse]: + r"""Return a callable for the list entities method over gRPC. + + List metadata entities in a zone. + + Returns: + Callable[[~.ListEntitiesRequest], + ~.ListEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entities' not in self._stubs: + self._stubs['list_entities'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListEntities', + request_serializer=metadata_.ListEntitiesRequest.serialize, + response_deserializer=metadata_.ListEntitiesResponse.deserialize, + ) + return self._stubs['list_entities'] + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + metadata_.Partition]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + ~.Partition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_partition' not in self._stubs: + self._stubs['create_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreatePartition', + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['create_partition'] + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. + + Returns: + Callable[[~.DeletePartitionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_partition' not in self._stubs: + self._stubs['delete_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeletePartition', + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_partition'] + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + metadata_.Partition]: + r"""Return a callable for the get partition method over gRPC. + + Get a metadata partition of an entity. + + Returns: + Callable[[~.GetPartitionRequest], + ~.Partition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_partition' not in self._stubs: + self._stubs['get_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetPartition', + request_serializer=metadata_.GetPartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['get_partition'] + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + metadata_.ListPartitionsResponse]: + r"""Return a callable for the list partitions method over gRPC. + + List metadata partitions of an entity. + + Returns: + Callable[[~.ListPartitionsRequest], + ~.ListPartitionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_partitions' not in self._stubs: + self._stubs['list_partitions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListPartitions', + request_serializer=metadata_.ListPartitionsRequest.serialize, + response_deserializer=metadata_.ListPartitionsResponse.deserialize, + ) + return self._stubs['list_partitions'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'MetadataServiceGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2860f724048e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py @@ -0,0 +1,796 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import MetadataServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + 
request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert the gRPC trailing metadata to a serializable dict for logging + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MetadataServiceGrpcAsyncIOTransport(MetadataServiceTransport): + """gRPC AsyncIO backend transport for MetadataService. + + Metadata service manages metadata resources such as tables, + filesets and partitions. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_entity(self) -> Callable[ + [metadata_.CreateEntityRequest], + Awaitable[metadata_.Entity]]: + r"""Return a callable for the create entity method over gRPC. + + Create a metadata entity.
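As with the sync transport earlier in this patch, an `aio.Channel` (or a callable that builds one) can be injected instead of letting the transport create its own. A minimal sketch; the client wiring in the comment is indicative only.

```python
import asyncio

from google.cloud.dataplex_v1.services.metadata_service.transports import (
    MetadataServiceGrpcAsyncIOTransport,
)

async def main() -> None:
    # Prepare the channel up front; passing it in makes the transport
    # ignore credential kwargs, as in the sync case.
    channel = MetadataServiceGrpcAsyncIOTransport.create_channel(
        "dataplex.googleapis.com",
    )
    transport = MetadataServiceGrpcAsyncIOTransport(channel=channel)
    try:
        ...  # e.g. dataplex_v1.MetadataServiceAsyncClient(transport=transport)
    finally:
        # close() returns the channel's close coroutine, so await it.
        await transport.close()

asyncio.run(main())
```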
+ + Returns: + Callable[[~.CreateEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entity' not in self._stubs: + self._stubs['create_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreateEntity', + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['create_entity'] + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + Awaitable[metadata_.Entity]]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entity' not in self._stubs: + self._stubs['update_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['update_entity'] + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entity' not in self._stubs: + self._stubs['delete_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entity'] + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + Awaitable[metadata_.Entity]]: + r"""Return a callable for the get entity method over gRPC. + + Get a metadata entity. + + Returns: + Callable[[~.GetEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entity' not in self._stubs: + self._stubs['get_entity'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetEntity', + request_serializer=metadata_.GetEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs['get_entity'] + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + Awaitable[metadata_.ListEntitiesResponse]]: + r"""Return a callable for the list entities method over gRPC. 
+ + List metadata entities in a zone. + + Returns: + Callable[[~.ListEntitiesRequest], + Awaitable[~.ListEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entities' not in self._stubs: + self._stubs['list_entities'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListEntities', + request_serializer=metadata_.ListEntitiesRequest.serialize, + response_deserializer=metadata_.ListEntitiesResponse.deserialize, + ) + return self._stubs['list_entities'] + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + Awaitable[metadata_.Partition]]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + Awaitable[~.Partition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_partition' not in self._stubs: + self._stubs['create_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/CreatePartition', + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['create_partition'] + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. + + Returns: + Callable[[~.DeletePartitionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_partition' not in self._stubs: + self._stubs['delete_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/DeletePartition', + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_partition'] + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + Awaitable[metadata_.Partition]]: + r"""Return a callable for the get partition method over gRPC. + + Get a metadata partition of an entity. + + Returns: + Callable[[~.GetPartitionRequest], + Awaitable[~.Partition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_partition' not in self._stubs: + self._stubs['get_partition'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/GetPartition', + request_serializer=metadata_.GetPartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs['get_partition'] + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + Awaitable[metadata_.ListPartitionsResponse]]: + r"""Return a callable for the list partitions method over gRPC. + + List metadata partitions of an entity. + + Returns: + Callable[[~.ListPartitionsRequest], + Awaitable[~.ListPartitionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_partitions' not in self._stubs: + self._stubs['list_partitions'] = self._logged_channel.unary_unary( + '/google.cloud.dataplex.v1.MetadataService/ListPartitions', + request_serializer=metadata_.ListPartitionsRequest.serialize, + response_deserializer=metadata_.ListPartitionsResponse.deserialize, + ) + return self._stubs['list_partitions'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_entity: self._wrap_method( + self.create_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.update_entity: self._wrap_method( + self.update_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_entity: self._wrap_method( + self.delete_entity, + default_timeout=60.0, + client_info=client_info, + ), + self.get_entity: self._wrap_method( + self.get_entity, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_entities: self._wrap_method( + self.list_entities, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_partition: self._wrap_method( + self.create_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_partition: self._wrap_method( + self.delete_partition, + default_timeout=60.0, + client_info=client_info, + ), + self.get_partition: self._wrap_method( + self.get_partition, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_partitions: self._wrap_method( + self.list_partitions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + 
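[editor's note — a sketch grounded in the retry arguments above: get_entity, list_entities, get_partition, and list_partitions retry only ServiceUnavailable, with exponential backoff sleeps of roughly 1.0s, 1.3s, 1.69s, ... capped at 10s per sleep, giving up once the 60s deadline elapses. The equivalent standalone policy object would be:]

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    retry_policy = retries.AsyncRetry(
        initial=1.0,      # first backoff sleep, in seconds
        maximum=10.0,     # ceiling for any single sleep
        multiplier=1.3,   # growth factor between sleeps
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,    # total time budget before giving up
    )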
self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
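[editor's note: the Operations and Locations mixin stubs in this block reuse the same memoization pattern as the Dataplex RPCs above, but serialize with the raw protobuf SerializeToString/FromString pair rather than the proto-plus serialize/deserialize helpers, since their request and response types are plain protobuf messages.]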
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
+        r"""Return a callable for the list locations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = (
+    'MetadataServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py
new file mode 100644
index 000000000000..617fb4ddec8d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py
@@ -0,0 +1,2403 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
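[editor's aside — before the REST transport below, a minimal usage sketch for the async gRPC transport defined above. The transport construction, the entity name, and the `fetch_entity` helper are assumptions for illustration, not part of the generated surface; each stub property returns a cached awaitable callable.]

.. code-block:: python

    import asyncio

    from google.cloud.dataplex_v1.services.metadata_service.transports import (
        MetadataServiceGrpcAsyncIOTransport,
    )
    from google.cloud.dataplex_v1.types import metadata_

    async def fetch_entity(name: str) -> metadata_.Entity:
        # Credentials are resolved from the environment by default.
        transport = MetadataServiceGrpcAsyncIOTransport()
        # Accessing the property builds and memoizes the GetEntity stub.
        request = metadata_.GetEntityRequest(name=name)
        return await transport.get_entity(request)

    # e.g. asyncio.run(fetch_entity("projects/p/locations/l/lakes/k/zones/z/entities/e"))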
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.dataplex_v1.types import metadata_ +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseMetadataServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MetadataServiceRestInterceptor: + """Interceptor for MetadataService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MetadataServiceRestTransport. + + .. 
code-block:: python
+        class MyCustomMetadataServiceInterceptor(MetadataServiceRestInterceptor):
+            def pre_create_entity(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_entity(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_partition(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_partition(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_entity(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_delete_partition(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_entity(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_entity(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_partition(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_partition(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_entities(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_entities(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_partitions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_partitions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_entity(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_entity(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = MetadataServiceRestTransport(interceptor=MyCustomMetadataServiceInterceptor())
+        client = MetadataServiceClient(transport=transport)
+
+
+    """
+    def pre_create_entity(self, request: metadata_.CreateEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.CreateEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_entity
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the MetadataService server.
+        """
+        return request, metadata
+
+    def post_create_entity(self, response: metadata_.Entity) -> metadata_.Entity:
+        """Post-rpc interceptor for create_entity
+
+        DEPRECATED. Please use the `post_create_entity_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the MetadataService server but before
+        it is returned to user code. This `post_create_entity` interceptor runs
+        before the `post_create_entity_with_metadata` interceptor.
+        """
+        return response
+
+    def post_create_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Post-rpc interceptor for create_entity
+
+        Override in a subclass to read or manipulate the response or metadata after it
+        is returned by the MetadataService server but before it is returned to user code.
+ + We recommend only using this `post_create_entity_with_metadata` + interceptor in new development instead of the `post_create_entity` interceptor. + When both interceptors are used, this `post_create_entity_with_metadata` interceptor runs after the + `post_create_entity` interceptor. The (possibly modified) response returned by + `post_create_entity` will be passed to + `post_create_entity_with_metadata`. + """ + return response, metadata + + def pre_create_partition(self, request: metadata_.CreatePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.CreatePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_create_partition(self, response: metadata_.Partition) -> metadata_.Partition: + """Post-rpc interceptor for create_partition + + DEPRECATED. Please use the `post_create_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_create_partition` interceptor runs + before the `post_create_partition_with_metadata` interceptor. + """ + return response + + def post_create_partition_with_metadata(self, response: metadata_.Partition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Partition, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_create_partition_with_metadata` + interceptor in new development instead of the `post_create_partition` interceptor. + When both interceptors are used, this `post_create_partition_with_metadata` interceptor runs after the + `post_create_partition` interceptor. The (possibly modified) response returned by + `post_create_partition` will be passed to + `post_create_partition_with_metadata`. + """ + return response, metadata + + def pre_delete_entity(self, request: metadata_.DeleteEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.DeleteEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entity + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def pre_delete_partition(self, request: metadata_.DeletePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.DeletePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def pre_get_entity(self, request: metadata_.GetEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.GetEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entity + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. 
+ """ + return request, metadata + + def post_get_entity(self, response: metadata_.Entity) -> metadata_.Entity: + """Post-rpc interceptor for get_entity + + DEPRECATED. Please use the `post_get_entity_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_get_entity` interceptor runs + before the `post_get_entity_with_metadata` interceptor. + """ + return response + + def post_get_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entity + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_get_entity_with_metadata` + interceptor in new development instead of the `post_get_entity` interceptor. + When both interceptors are used, this `post_get_entity_with_metadata` interceptor runs after the + `post_get_entity` interceptor. The (possibly modified) response returned by + `post_get_entity` will be passed to + `post_get_entity_with_metadata`. + """ + return response, metadata + + def pre_get_partition(self, request: metadata_.GetPartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.GetPartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_get_partition(self, response: metadata_.Partition) -> metadata_.Partition: + """Post-rpc interceptor for get_partition + + DEPRECATED. Please use the `post_get_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_get_partition` interceptor runs + before the `post_get_partition_with_metadata` interceptor. + """ + return response + + def post_get_partition_with_metadata(self, response: metadata_.Partition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Partition, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_get_partition_with_metadata` + interceptor in new development instead of the `post_get_partition` interceptor. + When both interceptors are used, this `post_get_partition_with_metadata` interceptor runs after the + `post_get_partition` interceptor. The (possibly modified) response returned by + `post_get_partition` will be passed to + `post_get_partition_with_metadata`. + """ + return response, metadata + + def pre_list_entities(self, request: metadata_.ListEntitiesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. 
+ """ + return request, metadata + + def post_list_entities(self, response: metadata_.ListEntitiesResponse) -> metadata_.ListEntitiesResponse: + """Post-rpc interceptor for list_entities + + DEPRECATED. Please use the `post_list_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_list_entities` interceptor runs + before the `post_list_entities_with_metadata` interceptor. + """ + return response + + def post_list_entities_with_metadata(self, response: metadata_.ListEntitiesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListEntitiesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_list_entities_with_metadata` + interceptor in new development instead of the `post_list_entities` interceptor. + When both interceptors are used, this `post_list_entities_with_metadata` interceptor runs after the + `post_list_entities` interceptor. The (possibly modified) response returned by + `post_list_entities` will be passed to + `post_list_entities_with_metadata`. + """ + return response, metadata + + def pre_list_partitions(self, request: metadata_.ListPartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListPartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_partitions + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_list_partitions(self, response: metadata_.ListPartitionsResponse) -> metadata_.ListPartitionsResponse: + """Post-rpc interceptor for list_partitions + + DEPRECATED. Please use the `post_list_partitions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_list_partitions` interceptor runs + before the `post_list_partitions_with_metadata` interceptor. + """ + return response + + def post_list_partitions_with_metadata(self, response: metadata_.ListPartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListPartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_partitions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_list_partitions_with_metadata` + interceptor in new development instead of the `post_list_partitions` interceptor. + When both interceptors are used, this `post_list_partitions_with_metadata` interceptor runs after the + `post_list_partitions` interceptor. The (possibly modified) response returned by + `post_list_partitions` will be passed to + `post_list_partitions_with_metadata`. 
+ """ + return response, metadata + + def pre_update_entity(self, request: metadata_.UpdateEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.UpdateEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_entity + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_update_entity(self, response: metadata_.Entity) -> metadata_.Entity: + """Post-rpc interceptor for update_entity + + DEPRECATED. Please use the `post_update_entity_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. This `post_update_entity` interceptor runs + before the `post_update_entity_with_metadata` interceptor. + """ + return response + + def post_update_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entity + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetadataService server but before it is returned to user code. + + We recommend only using this `post_update_entity_with_metadata` + interceptor in new development instead of the `post_update_entity` interceptor. + When both interceptors are used, this `post_update_entity_with_metadata` interceptor runs after the + `post_update_entity` interceptor. The (possibly modified) response returned by + `post_update_entity` will be passed to + `post_update_entity_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetadataService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the MetadataService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetadataServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetadataServiceRestInterceptor + + +class MetadataServiceRestTransport(_BaseMetadataServiceRestTransport): + """REST backend synchronous transport for MetadataService. + + Metadata service manages metadata resources such as tables, + filesets and partitions. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
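[editor's sketch: the recommended ``_with_metadata`` interceptor variant from the interceptor class above, wired into this transport. The subclass name and log message are assumptions for illustration.]

.. code-block:: python

    import logging

    from google.cloud.dataplex_v1 import MetadataServiceClient
    from google.cloud.dataplex_v1.services.metadata_service.transports.rest import (
        MetadataServiceRestInterceptor,
        MetadataServiceRestTransport,
    )

    class MetadataAwareInterceptor(MetadataServiceRestInterceptor):
        def post_get_entity_with_metadata(self, response, metadata):
            # Runs after the deprecated post_get_entity hook; `metadata` holds
            # the HTTP response headers as (key, value) pairs.
            logging.info("GetEntity -> %s (%d headers)", response.name, len(metadata))
            return response, metadata

    transport = MetadataServiceRestTransport(interceptor=MetadataAwareInterceptor())
    client = MetadataServiceClient(transport=transport)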
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'dataplex.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[MetadataServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'dataplex.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
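[editor's note — an assumption-labeled sketch: the DEBUG request/response logging used throughout this module is gated on CLIENT_LOGGING_SUPPORTED and on the effective level of the module's stdlib logger, so enabling it from application code only needs standard logging configuration:]

.. code-block:: python

    import logging

    logging.basicConfig(level=logging.INFO)
    # The transport loggers live under the google.cloud.dataplex_v1 namespace.
    logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)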
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MetadataServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateEntity(_BaseMetadataServiceRestTransport._BaseCreateEntity, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.CreateEntity") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: metadata_.CreateEntityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.Entity: + r"""Call the create entity method over HTTP. + + Args: + request (~.metadata_.CreateEntityRequest): + The request object. Create a metadata entity request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.Entity: + Represents tables and fileset + metadata contained within a zone. 
+ + """ + + http_options = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_http_options() + + request, metadata = self._interceptor.pre_create_entity(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_transcoded_request(http_options, request) + + body = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CreateEntity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "CreateEntity", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._CreateEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.Entity() + pb_resp = metadata_.Entity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entity_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.Entity.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.create_entity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "CreateEntity", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreatePartition(_BaseMetadataServiceRestTransport._BaseCreatePartition, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.CreatePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: metadata_.CreatePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> metadata_.Partition: + r"""Call the create partition method over HTTP. + + Args: + request (~.metadata_.CreatePartitionRequest): + The request object. Create metadata partition request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.Partition: + Represents partition metadata + contained within entity instances. + + """ + + http_options = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_http_options() + + request, metadata = self._interceptor.pre_create_partition(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_transcoded_request(http_options, request) + + body = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CreatePartition", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "CreatePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._CreatePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
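[editor's note: `core_exceptions.from_http_response` below converts the failed HTTP response into the matching GoogleAPICallError subclass (for example 404 -> NotFound, 403 -> PermissionDenied), so REST callers see the same exception types as gRPC callers.]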
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.Partition() + pb_resp = metadata_.Partition.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.Partition.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.create_partition", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "CreatePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteEntity(_BaseMetadataServiceRestTransport._BaseDeleteEntity, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.DeleteEntity") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.DeleteEntityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete entity method over HTTP. + + Args: + request (~.metadata_.DeleteEntityRequest): + The request object. Delete a metadata entity request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_http_options() + + request, metadata = self._interceptor.pre_delete_entity(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeleteEntity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "DeleteEntity", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._DeleteEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeletePartition(_BaseMetadataServiceRestTransport._BaseDeletePartition, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.DeletePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.DeletePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete partition method over HTTP. + + Args: + request (~.metadata_.DeletePartitionRequest): + The request object. Delete metadata partition request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_http_options() + + request, metadata = self._interceptor.pre_delete_partition(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeletePartition", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "DeletePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._DeletePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetEntity(_BaseMetadataServiceRestTransport._BaseGetEntity, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.GetEntity") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.GetEntityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.Entity: + r"""Call the get entity method over HTTP. + + Args: + request (~.metadata_.GetEntityRequest): + The request object. Get metadata entity request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.Entity: + Represents tables and fileset + metadata contained within a zone. 
+ + """ + + http_options = _BaseMetadataServiceRestTransport._BaseGetEntity._get_http_options() + + request, metadata = self._interceptor.pre_get_entity(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseGetEntity._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseGetEntity._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetEntity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetEntity", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._GetEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.Entity() + pb_resp = metadata_.Entity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.Entity.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.get_entity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetEntity", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetPartition(_BaseMetadataServiceRestTransport._BaseGetPartition, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.GetPartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.GetPartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.Partition: + r"""Call the get partition method over HTTP. + + Args: + request (~.metadata_.GetPartitionRequest): + The request object. 
Get metadata partition request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.Partition: + Represents partition metadata + contained within entity instances. + + """ + + http_options = _BaseMetadataServiceRestTransport._BaseGetPartition._get_http_options() + + request, metadata = self._interceptor.pre_get_partition(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseGetPartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseGetPartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetPartition", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetPartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._GetPartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
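[editor's note on the deserialization below: `metadata_.Partition.pb(resp)` returns the raw protobuf message that backs the proto-plus wrapper, so `json_format.Parse` fills `resp` in place and no reassignment is needed before the interceptors run.]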
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.Partition() + pb_resp = metadata_.Partition.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.Partition.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.get_partition", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetPartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListEntities(_BaseMetadataServiceRestTransport._BaseListEntities, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.ListEntities") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.ListEntitiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.ListEntitiesResponse: + r"""Call the list entities method over HTTP. + + Args: + request (~.metadata_.ListEntitiesRequest): + The request object. List metadata entities request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.ListEntitiesResponse: + List metadata entities response. 
+ """ + + http_options = _BaseMetadataServiceRestTransport._BaseListEntities._get_http_options() + + request, metadata = self._interceptor.pre_list_entities(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseListEntities._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseListEntities._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListEntities", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListEntities", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._ListEntities._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.ListEntitiesResponse() + pb_resp = metadata_.ListEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entities_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.ListEntitiesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.list_entities", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListEntities", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListPartitions(_BaseMetadataServiceRestTransport._BaseListPartitions, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.ListPartitions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: metadata_.ListPartitionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.ListPartitionsResponse: + r"""Call the list partitions method 
over HTTP. + + Args: + request (~.metadata_.ListPartitionsRequest): + The request object. List metadata partitions request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.ListPartitionsResponse: + List metadata partitions response. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseListPartitions._get_http_options() + + request, metadata = self._interceptor.pre_list_partitions(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseListPartitions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseListPartitions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListPartitions", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListPartitions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._ListPartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
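+            # Illustrative note, not generated code: the DEBUG records in this
+            # method only fire when CLIENT_LOGGING_SUPPORTED is true and the
+            # logger is enabled, e.g. via the documented
+            # GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable or, assuming
+            # standard logging propagation, plain stdlib configuration:
+            #
+            #     import logging
+            #     logging.basicConfig(level=logging.DEBUG)
+            #     logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)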
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.ListPartitionsResponse() + pb_resp = metadata_.ListPartitionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_partitions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_partitions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.ListPartitionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListPartitions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateEntity(_BaseMetadataServiceRestTransport._BaseUpdateEntity, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.UpdateEntity") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: metadata_.UpdateEntityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> metadata_.Entity: + r"""Call the update entity method over HTTP. + + Args: + request (~.metadata_.UpdateEntityRequest): + The request object. Update a metadata entity request. + The existing entity will be fully + replaced by the entity in the request. + The entity ID is mutable. To modify the + ID, use the current entity ID in the + request URL and specify the new ID in + the request body. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.metadata_.Entity: + Represents tables and fileset + metadata contained within a zone.
+ + """ + + http_options = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_http_options() + + request, metadata = self._interceptor.pre_update_entity(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_transcoded_request(http_options, request) + + body = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.UpdateEntity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "UpdateEntity", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._UpdateEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metadata_.Entity() + pb_resp = metadata_.Entity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_entity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entity_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = metadata_.Entity.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceClient.update_entity", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "UpdateEntity", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_entity(self) -> Callable[ + [metadata_.CreateEntityRequest], + metadata_.Entity]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntity(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_partition(self) -> Callable[ + [metadata_.CreatePartitionRequest], + metadata_.Partition]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreatePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entity(self) -> Callable[ + [metadata_.DeleteEntityRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntity(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_partition(self) -> Callable[ + [metadata_.DeletePartitionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity(self) -> Callable[ + [metadata_.GetEntityRequest], + metadata_.Entity]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntity(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_partition(self) -> Callable[ + [metadata_.GetPartitionRequest], + metadata_.Partition]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entities(self) -> Callable[ + [metadata_.ListEntitiesRequest], + metadata_.ListEntitiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_partitions(self) -> Callable[ + [metadata_.ListPartitionsRequest], + metadata_.ListPartitionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPartitions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity(self) -> Callable[ + [metadata_.UpdateEntityRequest], + metadata_.Entity]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateEntity(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseMetadataServiceRestTransport._BaseGetLocation, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
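+        # Illustrative sketch, not generated code (assumes a client instance
+        # named `client`): this stub backs the get_location mixin on the client,
+        # with a hypothetical resource name:
+        #
+        #     from google.cloud.location import locations_pb2
+        #     loc = client.get_location(
+        #         locations_pb2.GetLocationRequest(
+        #             name="projects/my-project/locations/us-central1",  # hypothetical
+        #         )
+        #     )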
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseMetadataServiceRestTransport._BaseListLocations, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseMetadataServiceRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseMetadataServiceRestTransport._BaseCancelOperation, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + body = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CancelOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseMetadataServiceRestTransport._BaseDeleteOperation, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseMetadataServiceRestTransport._BaseGetOperation, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseMetadataServiceRestTransport._BaseListOperations, MetadataServiceRestStub): + def __hash__(self): + return hash("MetadataServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseMetadataServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseMetadataServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMetadataServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MetadataServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
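+        # Illustrative sketch, not generated code (assumes a client named
+        # `client`): unlike the service RPCs above, this mixin returns the raw
+        # ListOperationsResponse rather than a pager, so paging is manual:
+        #
+        #     req = operations_pb2.ListOperationsRequest(
+        #         name="projects/my-project/locations/us-central1",  # hypothetical
+        #     )
+        #     while True:
+        #         page = client.list_operations(request=req)
+        #         for op in page.operations:
+        #             ...  # inspect each long-running operation
+        #         if not page.next_page_token:
+        #             break
+        #         req.page_token = page.next_page_token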
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.dataplex.v1.MetadataService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MetadataServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py new file mode 100644 index 000000000000..c61cbbe05b13 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.dataplex_v1.types import metadata_ +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseMetadataServiceRestTransport(MetadataServiceTransport): + """Base REST backend transport for MetadataService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'dataplex.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateEntity: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities', + 'body': 'entity', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.CreateEntityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseCreateEntity._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreatePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if
k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions', + 'body': 'partition', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.CreatePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseCreatePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteEntity: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "etag" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.DeleteEntityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseDeleteEntity._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeletePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.DeletePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseDeletePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetEntity: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.GetEntityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseGetEntity._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetPartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.GetPartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseGetPartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEntities: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "view" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.ListEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseListEntities._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + 
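+    # Illustrative sketch, not generated code: each nested class above pairs
+    # _get_http_options() with google.api_core.path_template.transcode (a real
+    # helper) to turn a request into an HTTP call; with hypothetical values:
+    #
+    #     from google.api_core import path_template
+    #
+    #     transcoded = path_template.transcode(
+    #         [{'method': 'get',
+    #           'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities'}],
+    #         parent='projects/p/locations/l/lakes/lk/zones/z',
+    #         page_size=10,
+    #     )
+    #     # -> {'method': 'get',
+    #     #     'uri': '/v1/projects/p/locations/l/lakes/lk/zones/z/entities',
+    #     #     'query_params': {'page_size': 10}}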
class _BaseListPartitions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.ListPartitionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseListPartitions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateEntity: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/v1/{entity.name=projects/*/locations/*/lakes/*/zones/*/entities/*}', + 'body': 'entity', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = metadata_.UpdateEntityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMetadataServiceRestTransport._BaseUpdateEntity._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + { + 'method': 'get', + 'uri': '/v1/{name=organizations/*/locations/*}/operations', + }, + ] 
+ return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseMetadataServiceRestTransport', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py new file mode 100644 index 000000000000..995e34a65b4a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py @@ -0,0 +1,484 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .analyze import ( + Content, + Environment, + Session, +) +from .business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) +from .catalog import ( + Aspect, + AspectSource, + AspectType, + CancelMetadataJobRequest, + CreateAspectTypeRequest, + CreateEntryGroupRequest, + CreateEntryLinkRequest, + CreateEntryRequest, + CreateEntryTypeRequest, + CreateMetadataJobRequest, + DeleteAspectTypeRequest, + DeleteEntryGroupRequest, + DeleteEntryLinkRequest, + DeleteEntryRequest, + DeleteEntryTypeRequest, + Entry, + EntryGroup, + EntryLink, + EntrySource, + EntryType, + GetAspectTypeRequest, + GetEntryGroupRequest, + GetEntryLinkRequest, + GetEntryRequest, + GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, + ListAspectTypesRequest, + ListAspectTypesResponse, + ListEntriesRequest, + ListEntriesResponse, + ListEntryGroupsRequest, + ListEntryGroupsResponse, + ListEntryTypesRequest, + ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, + LookupEntryRequest, + MetadataJob, + SearchEntriesRequest, + SearchEntriesResponse, + SearchEntriesResult, + UpdateAspectTypeRequest, + UpdateEntryGroupRequest, + UpdateEntryRequest, + UpdateEntryTypeRequest, + EntryView, + TransferStatus, +) +from .cmek import ( + CreateEncryptionConfigRequest, + DeleteEncryptionConfigRequest, + EncryptionConfig, + GetEncryptionConfigRequest, + ListEncryptionConfigsRequest, + ListEncryptionConfigsResponse, + UpdateEncryptionConfigRequest, +) +from .content import ( + CreateContentRequest, + DeleteContentRequest, + GetContentRequest, + 
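The three helpers in the rest_base module above always compose the same way: `_get_http_options` supplies the HTTP rule for a method, `path_template.transcode` from `google.api_core` matches the request fields against the URI template, and whatever fields the template does not consume become the query string. A minimal offline sketch of that flow, using a placeholder operation name rather than a real request message:

```python
from google.api_core import path_template

# Same shape as the dicts returned by _get_http_options above.
http_options = [{
    'method': 'get',
    'uri': '/v1/{name=projects/*/locations/*/operations/*}',
}]

# Same shape as json_format.MessageToDict(request) for a GetOperationRequest.
request_kwargs = {'name': 'projects/my-project/locations/us-central1/operations/op-123'}

transcoded = path_template.transcode(http_options, **request_kwargs)
assert transcoded['method'] == 'get'
assert transcoded['uri'] == '/v1/projects/my-project/locations/us-central1/operations/op-123'
assert transcoded['query_params'] == {}  # every field was consumed by the URI template
```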
ListContentRequest, + ListContentResponse, + UpdateContentRequest, +) +from .data_discovery import ( + DataDiscoveryResult, + DataDiscoverySpec, +) +from .data_profile import ( + DataProfileResult, + DataProfileSpec, +) +from .data_quality import ( + DataQualityColumnResult, + DataQualityDimension, + DataQualityDimensionResult, + DataQualityResult, + DataQualityRule, + DataQualityRuleResult, + DataQualitySpec, +) +from .data_taxonomy import ( + CreateDataAttributeBindingRequest, + CreateDataAttributeRequest, + CreateDataTaxonomyRequest, + DataAttribute, + DataAttributeBinding, + DataTaxonomy, + DeleteDataAttributeBindingRequest, + DeleteDataAttributeRequest, + DeleteDataTaxonomyRequest, + GetDataAttributeBindingRequest, + GetDataAttributeRequest, + GetDataTaxonomyRequest, + ListDataAttributeBindingsRequest, + ListDataAttributeBindingsResponse, + ListDataAttributesRequest, + ListDataAttributesResponse, + ListDataTaxonomiesRequest, + ListDataTaxonomiesResponse, + UpdateDataAttributeBindingRequest, + UpdateDataAttributeRequest, + UpdateDataTaxonomyRequest, +) +from .datascans import ( + CreateDataScanRequest, + DataScan, + DataScanJob, + DeleteDataScanRequest, + GenerateDataQualityRulesRequest, + GenerateDataQualityRulesResponse, + GetDataScanJobRequest, + GetDataScanRequest, + ListDataScanJobsRequest, + ListDataScanJobsResponse, + ListDataScansRequest, + ListDataScansResponse, + RunDataScanRequest, + RunDataScanResponse, + UpdateDataScanRequest, + DataScanType, +) +from .datascans_common import ( + DataScanCatalogPublishingStatus, +) +from .logs import ( + BusinessGlossaryEvent, + DataQualityScanRuleResult, + DataScanEvent, + DiscoveryEvent, + EntryLinkEvent, + GovernanceEvent, + JobEvent, + SessionEvent, +) +from .metadata_ import ( + CreateEntityRequest, + CreatePartitionRequest, + DeleteEntityRequest, + DeletePartitionRequest, + Entity, + GetEntityRequest, + GetPartitionRequest, + ListEntitiesRequest, + ListEntitiesResponse, + ListPartitionsRequest, + ListPartitionsResponse, + Partition, + Schema, + StorageAccess, + StorageFormat, + UpdateEntityRequest, + StorageSystem, +) +from .processing import ( + DataSource, + ScannedData, + Trigger, +) +from .resources import ( + Action, + Asset, + AssetStatus, + Lake, + Zone, + State, +) +from .security import ( + DataAccessSpec, + ResourceAccessSpec, +) +from .service import ( + CancelJobRequest, + CreateAssetRequest, + CreateEnvironmentRequest, + CreateLakeRequest, + CreateTaskRequest, + CreateZoneRequest, + DeleteAssetRequest, + DeleteEnvironmentRequest, + DeleteLakeRequest, + DeleteTaskRequest, + DeleteZoneRequest, + GetAssetRequest, + GetEnvironmentRequest, + GetJobRequest, + GetLakeRequest, + GetTaskRequest, + GetZoneRequest, + ListActionsResponse, + ListAssetActionsRequest, + ListAssetsRequest, + ListAssetsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + ListJobsRequest, + ListJobsResponse, + ListLakeActionsRequest, + ListLakesRequest, + ListLakesResponse, + ListSessionsRequest, + ListSessionsResponse, + ListTasksRequest, + ListTasksResponse, + ListZoneActionsRequest, + ListZonesRequest, + ListZonesResponse, + OperationMetadata, + RunTaskRequest, + RunTaskResponse, + UpdateAssetRequest, + UpdateEnvironmentRequest, + UpdateLakeRequest, + UpdateTaskRequest, + UpdateZoneRequest, +) +from .tasks import ( + Job, + Task, +) + +__all__ = ( + 'Content', + 'Environment', + 'Session', + 'CreateGlossaryCategoryRequest', + 'CreateGlossaryRequest', + 'CreateGlossaryTermRequest', + 'DeleteGlossaryCategoryRequest', + 
'DeleteGlossaryRequest', + 'DeleteGlossaryTermRequest', + 'GetGlossaryCategoryRequest', + 'GetGlossaryRequest', + 'GetGlossaryTermRequest', + 'Glossary', + 'GlossaryCategory', + 'GlossaryTerm', + 'ListGlossariesRequest', + 'ListGlossariesResponse', + 'ListGlossaryCategoriesRequest', + 'ListGlossaryCategoriesResponse', + 'ListGlossaryTermsRequest', + 'ListGlossaryTermsResponse', + 'UpdateGlossaryCategoryRequest', + 'UpdateGlossaryRequest', + 'UpdateGlossaryTermRequest', + 'Aspect', + 'AspectSource', + 'AspectType', + 'CancelMetadataJobRequest', + 'CreateAspectTypeRequest', + 'CreateEntryGroupRequest', + 'CreateEntryLinkRequest', + 'CreateEntryRequest', + 'CreateEntryTypeRequest', + 'CreateMetadataJobRequest', + 'DeleteAspectTypeRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryLinkRequest', + 'DeleteEntryRequest', + 'DeleteEntryTypeRequest', + 'Entry', + 'EntryGroup', + 'EntryLink', + 'EntrySource', + 'EntryType', + 'GetAspectTypeRequest', + 'GetEntryGroupRequest', + 'GetEntryLinkRequest', + 'GetEntryRequest', + 'GetEntryTypeRequest', + 'GetMetadataJobRequest', + 'ImportItem', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'LookupEntryRequest', + 'MetadataJob', + 'SearchEntriesRequest', + 'SearchEntriesResponse', + 'SearchEntriesResult', + 'UpdateAspectTypeRequest', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateEntryTypeRequest', + 'EntryView', + 'TransferStatus', + 'CreateEncryptionConfigRequest', + 'DeleteEncryptionConfigRequest', + 'EncryptionConfig', + 'GetEncryptionConfigRequest', + 'ListEncryptionConfigsRequest', + 'ListEncryptionConfigsResponse', + 'UpdateEncryptionConfigRequest', + 'CreateContentRequest', + 'DeleteContentRequest', + 'GetContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'UpdateContentRequest', + 'DataDiscoveryResult', + 'DataDiscoverySpec', + 'DataProfileResult', + 'DataProfileSpec', + 'DataQualityColumnResult', + 'DataQualityDimension', + 'DataQualityDimensionResult', + 'DataQualityResult', + 'DataQualityRule', + 'DataQualityRuleResult', + 'DataQualitySpec', + 'CreateDataAttributeBindingRequest', + 'CreateDataAttributeRequest', + 'CreateDataTaxonomyRequest', + 'DataAttribute', + 'DataAttributeBinding', + 'DataTaxonomy', + 'DeleteDataAttributeBindingRequest', + 'DeleteDataAttributeRequest', + 'DeleteDataTaxonomyRequest', + 'GetDataAttributeBindingRequest', + 'GetDataAttributeRequest', + 'GetDataTaxonomyRequest', + 'ListDataAttributeBindingsRequest', + 'ListDataAttributeBindingsResponse', + 'ListDataAttributesRequest', + 'ListDataAttributesResponse', + 'ListDataTaxonomiesRequest', + 'ListDataTaxonomiesResponse', + 'UpdateDataAttributeBindingRequest', + 'UpdateDataAttributeRequest', + 'UpdateDataTaxonomyRequest', + 'CreateDataScanRequest', + 'DataScan', + 'DataScanJob', + 'DeleteDataScanRequest', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'GetDataScanJobRequest', + 'GetDataScanRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 'UpdateDataScanRequest', + 'DataScanType', + 'DataScanCatalogPublishingStatus', + 'BusinessGlossaryEvent', + 'DataQualityScanRuleResult', + 'DataScanEvent', + 'DiscoveryEvent', + 'EntryLinkEvent', + 'GovernanceEvent', + 
'JobEvent', + 'SessionEvent', + 'CreateEntityRequest', + 'CreatePartitionRequest', + 'DeleteEntityRequest', + 'DeletePartitionRequest', + 'Entity', + 'GetEntityRequest', + 'GetPartitionRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'ListPartitionsRequest', + 'ListPartitionsResponse', + 'Partition', + 'Schema', + 'StorageAccess', + 'StorageFormat', + 'UpdateEntityRequest', + 'StorageSystem', + 'DataSource', + 'ScannedData', + 'Trigger', + 'Action', + 'Asset', + 'AssetStatus', + 'Lake', + 'Zone', + 'State', + 'DataAccessSpec', + 'ResourceAccessSpec', + 'CancelJobRequest', + 'CreateAssetRequest', + 'CreateEnvironmentRequest', + 'CreateLakeRequest', + 'CreateTaskRequest', + 'CreateZoneRequest', + 'DeleteAssetRequest', + 'DeleteEnvironmentRequest', + 'DeleteLakeRequest', + 'DeleteTaskRequest', + 'DeleteZoneRequest', + 'GetAssetRequest', + 'GetEnvironmentRequest', + 'GetJobRequest', + 'GetLakeRequest', + 'GetTaskRequest', + 'GetZoneRequest', + 'ListActionsResponse', + 'ListAssetActionsRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListLakeActionsRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'ListZoneActionsRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'OperationMetadata', + 'RunTaskRequest', + 'RunTaskResponse', + 'UpdateAssetRequest', + 'UpdateEnvironmentRequest', + 'UpdateLakeRequest', + 'UpdateTaskRequest', + 'UpdateZoneRequest', + 'Job', + 'Task', +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py new file mode 100644 index 000000000000..93e0598c390f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py @@ -0,0 +1,492 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import resources +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Environment', + 'Content', + 'Session', + }, +) + + +class Environment(proto.Message): + r"""Environment represents a user-visible compute infrastructure + for analytics within a lake. + + Attributes: + name (str): + Output only. The relative resource name of the environment, + of the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the environment. 
This ID will be + different if the environment is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Environment creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the environment + was last updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the + environment. + description (str): + Optional. Description of the environment. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the + environment. + infrastructure_spec (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec): + Required. Infrastructure specification for + the Environment. + session_spec (google.cloud.dataplex_v1.types.Environment.SessionSpec): + Optional. Configuration for sessions created + for this environment. + session_status (google.cloud.dataplex_v1.types.Environment.SessionStatus): + Output only. Status of sessions created for + this environment. + endpoints (google.cloud.dataplex_v1.types.Environment.Endpoints): + Output only. URI Endpoints to access sessions + associated with the Environment. + """ + + class InfrastructureSpec(proto.Message): + r"""Configuration for the underlying infrastructure used to run + workloads. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + compute (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.ComputeResources): + Optional. Compute resources needed for + analyze interactive workloads. + + This field is a member of `oneof`_ ``resources``. + os_image (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime): + Required. Software Runtime Configuration for + analyze interactive workloads. + + This field is a member of `oneof`_ ``runtime``. + """ + + class ComputeResources(proto.Message): + r"""Compute resources associated with the analyze interactive + workloads. + + Attributes: + disk_size_gb (int): + Optional. Size in GB of the disk. Default is + 100 GB. + node_count (int): + Optional. Total number of nodes in the + sessions created for this environment. + max_node_count (int): + Optional. Max configurable nodes. If max_node_count > + node_count, then auto-scaling is enabled. + """ + + disk_size_gb: int = proto.Field( + proto.INT32, + number=1, + ) + node_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + + class OsImageRuntime(proto.Message): + r"""Software Runtime Configuration to run Analyze. + + Attributes: + image_version (str): + Required. Dataplex Universal Catalog Image + version. + java_libraries (MutableSequence[str]): + Optional. List of Java jars to be included in + the runtime environment. Valid input includes + Cloud Storage URIs to Jar binaries. For example, + gs://bucket-name/my/path/to/file.jar + python_packages (MutableSequence[str]): + Optional. A list of python packages to be + installed. Valid formats include Cloud Storage + URI to a PIP installable library. For example, + gs://bucket-name/my/path/to/lib.tar.gz + properties (MutableMapping[str, str]): + Optional. Spark properties to provide configuration for use + in sessions created for this environment. The properties to + set on daemon config files. Property keys are specified in + ``prefix:property`` format. The prefix must be "spark". 
+ """ + + image_version: str = proto.Field( + proto.STRING, + number=1, + ) + java_libraries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + python_packages: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + compute: 'Environment.InfrastructureSpec.ComputeResources' = proto.Field( + proto.MESSAGE, + number=50, + oneof='resources', + message='Environment.InfrastructureSpec.ComputeResources', + ) + os_image: 'Environment.InfrastructureSpec.OsImageRuntime' = proto.Field( + proto.MESSAGE, + number=100, + oneof='runtime', + message='Environment.InfrastructureSpec.OsImageRuntime', + ) + + class SessionSpec(proto.Message): + r"""Configuration for sessions created for this environment. + + Attributes: + max_idle_duration (google.protobuf.duration_pb2.Duration): + Optional. The idle time configuration of the + session. The session will be auto-terminated at + the end of this period. + enable_fast_startup (bool): + Optional. If True, this causes sessions to be + pre-created and available for faster startup to + enable interactive exploration use-cases. This + defaults to False to avoid additional billed + charges. These can only be set to True for the + environment with name set to "default", and with + default configuration. + """ + + max_idle_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + enable_fast_startup: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class SessionStatus(proto.Message): + r"""Status of sessions created for this environment. + + Attributes: + active (bool): + Output only. Queries over sessions to mark + whether the environment is currently active or + not + """ + + active: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class Endpoints(proto.Message): + r"""URI Endpoints to access sessions associated with the + Environment. + + Attributes: + notebooks (str): + Output only. URI to serve notebook APIs + sql (str): + Output only. URI to serve SQL APIs + """ + + notebooks: str = proto.Field( + proto.STRING, + number=1, + ) + sql: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=8, + enum=resources.State, + ) + infrastructure_spec: InfrastructureSpec = proto.Field( + proto.MESSAGE, + number=100, + message=InfrastructureSpec, + ) + session_spec: SessionSpec = proto.Field( + proto.MESSAGE, + number=101, + message=SessionSpec, + ) + session_status: SessionStatus = proto.Field( + proto.MESSAGE, + number=102, + message=SessionStatus, + ) + endpoints: Endpoints = proto.Field( + proto.MESSAGE, + number=200, + message=Endpoints, + ) + + +class Content(proto.Message): + r"""Content represents a user-visible notebook or a sql script + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + uid (str): + Output only. System generated globally unique + ID for the content. This ID will be different if + the content is deleted and re-created with the + same name. + path (str): + Required. The path for the Content file, + represented as directory structure. Unique + within a lake. Limited to alphanumerics, + hyphens, underscores, dots and slashes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Content creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the content was + last updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the + content. + description (str): + Optional. Description of the content. + data_text (str): + Required. Content data in string format. + + This field is a member of `oneof`_ ``data``. + sql_script (google.cloud.dataplex_v1.types.Content.SqlScript): + Sql Script related configurations. + + This field is a member of `oneof`_ ``content``. + notebook (google.cloud.dataplex_v1.types.Content.Notebook): + Notebook related configurations. + + This field is a member of `oneof`_ ``content``. + """ + + class SqlScript(proto.Message): + r"""Configuration for the Sql Script content. + + Attributes: + engine (google.cloud.dataplex_v1.types.Content.SqlScript.QueryEngine): + Required. Query Engine to be used for the Sql + Query. + """ + class QueryEngine(proto.Enum): + r"""Query Engine Type of the SQL Script. + + Values: + QUERY_ENGINE_UNSPECIFIED (0): + Value was unspecified. + SPARK (2): + Spark SQL Query. + """ + QUERY_ENGINE_UNSPECIFIED = 0 + SPARK = 2 + + engine: 'Content.SqlScript.QueryEngine' = proto.Field( + proto.ENUM, + number=1, + enum='Content.SqlScript.QueryEngine', + ) + + class Notebook(proto.Message): + r"""Configuration for Notebook content. + + Attributes: + kernel_type (google.cloud.dataplex_v1.types.Content.Notebook.KernelType): + Required. Kernel Type of the notebook. + """ + class KernelType(proto.Enum): + r"""Kernel Type of the Jupyter notebook. + + Values: + KERNEL_TYPE_UNSPECIFIED (0): + Kernel Type unspecified. + PYTHON3 (1): + Python 3 Kernel. 
+ """ + KERNEL_TYPE_UNSPECIFIED = 0 + PYTHON3 = 1 + + kernel_type: 'Content.Notebook.KernelType' = proto.Field( + proto.ENUM, + number=1, + enum='Content.Notebook.KernelType', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + path: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + data_text: str = proto.Field( + proto.STRING, + number=9, + oneof='data', + ) + sql_script: SqlScript = proto.Field( + proto.MESSAGE, + number=100, + oneof='content', + message=SqlScript, + ) + notebook: Notebook = proto.Field( + proto.MESSAGE, + number=101, + oneof='content', + message=Notebook, + ) + + +class Session(proto.Message): + r"""Represents an active analyze session running for a user. + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}/sessions/{session_id} + user_id (str): + Output only. Email of user running the + session. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Session start time. + state (google.cloud.dataplex_v1.types.State): + Output only. State of Session + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=4, + enum=resources.State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py new file mode 100644 index 000000000000..81794bb5d454 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py @@ -0,0 +1,876 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Glossary', + 'GlossaryCategory', + 'GlossaryTerm', + 'CreateGlossaryRequest', + 'UpdateGlossaryRequest', + 'DeleteGlossaryRequest', + 'GetGlossaryRequest', + 'ListGlossariesRequest', + 'ListGlossariesResponse', + 'CreateGlossaryCategoryRequest', + 'UpdateGlossaryCategoryRequest', + 'DeleteGlossaryCategoryRequest', + 'GetGlossaryCategoryRequest', + 'ListGlossaryCategoriesRequest', + 'ListGlossaryCategoriesResponse', + 'CreateGlossaryTermRequest', + 'UpdateGlossaryTermRequest', + 'DeleteGlossaryTermRequest', + 'GetGlossaryTermRequest', + 'ListGlossaryTermsRequest', + 'ListGlossaryTermsResponse', + }, +) + + +class Glossary(proto.Message): + r"""A Glossary represents a collection of GlossaryCategories and + GlossaryTerms defined by the user. Glossary is a top level + resource and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + Attributes: + name (str): + Output only. Identifier. The resource name of the Glossary. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + uid (str): + Output only. System generated unique id for + the Glossary. This ID will be different if the + Glossary is deleted and re-created with the same + name. + display_name (str): + Optional. User friendly display name of the + Glossary. This is user-mutable. This will be + same as the GlossaryId, if not specified. + description (str): + Optional. The user-mutable description of the + Glossary. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the Glossary + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the Glossary + was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + Glossary. + term_count (int): + Output only. The number of GlossaryTerms in + the Glossary. + category_count (int): + Output only. The number of GlossaryCategories + in the Glossary. + etag (str): + Optional. Needed for resource freshness + validation. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
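The glossary resources defined in this module form a strict hierarchy: a `Glossary` is the top-level parent, and each `GlossaryCategory` or `GlossaryTerm` points at either the glossary or a category through its `parent` field. A sketch with placeholder resource names:

```python
from google.cloud import dataplex_v1

glossary_name = "projects/my-project/locations/us-central1/glossaries/finance"

category = dataplex_v1.GlossaryCategory(
    display_name="Revenue",
    parent=glossary_name,  # nested directly under the glossary
)

term = dataplex_v1.GlossaryTerm(
    display_name="ARR",
    description="Annual recurring revenue.",
    # A term may hang off the glossary itself or off a category:
    parent=glossary_name + "/categories/revenue",
)
```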
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + term_count: int = proto.Field( + proto.INT32, + number=8, + ) + category_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + + +class GlossaryCategory(proto.Message): + r"""A GlossaryCategory represents a collection of + GlossaryCategories and GlossaryTerms within a Glossary that are + related to each other. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + GlossaryCategory. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + uid (str): + Output only. System generated unique id for + the GlossaryCategory. This ID will be different + if the GlossaryCategory is deleted and + re-created with the same name. + display_name (str): + Optional. User friendly display name of the + GlossaryCategory. This is user-mutable. This + will be same as the GlossaryCategoryId, if not + specified. + description (str): + Optional. The user-mutable description of the + GlossaryCategory. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryCategory was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryCategory was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + GlossaryCategory. + parent (str): + Required. The immediate parent of the GlossaryCategory in + the resource-hierarchy. It can either be a Glossary or a + GlossaryCategory. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + OR + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent: str = proto.Field( + proto.STRING, + number=8, + ) + + +class GlossaryTerm(proto.Message): + r"""GlossaryTerms are the core of Glossary. + A GlossaryTerm holds a rich text description that can be + attached to Entries or specific columns to enrich them. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + GlossaryTerm. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + uid (str): + Output only. System generated unique id for + the GlossaryTerm. 
This ID will be different if + the GlossaryTerm is deleted and re-created with + the same name. + display_name (str): + Optional. User friendly display name of the + GlossaryTerm. This is user-mutable. This will be + same as the GlossaryTermId, if not specified. + description (str): + Optional. The user-mutable description of the + GlossaryTerm. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryTerm was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryTerm was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + GlossaryTerm. + parent (str): + Required. The immediate parent of the GlossaryTerm in the + resource-hierarchy. It can either be a Glossary or a + GlossaryCategory. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + OR + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent: str = proto.Field( + proto.STRING, + number=8, + ) + + +class CreateGlossaryRequest(proto.Message): + r"""Create Glossary Request + + Attributes: + parent (str): + Required. The parent resource where this Glossary will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + glossary_id (str): + Required. Glossary ID: Glossary identifier. + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to create. + validate_only (bool): + Optional. Validates the request without + actually creating the Glossary. Default: false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + glossary_id: str = proto.Field( + proto.STRING, + number=2, + ) + glossary: 'Glossary' = proto.Field( + proto.MESSAGE, + number=3, + message='Glossary', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateGlossaryRequest(proto.Message): + r"""Update Glossary Request + + Attributes: + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to update. The Glossary's ``name`` + field is used to identify the Glossary to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + validate_only (bool): + Optional. Validates the request without + actually updating the Glossary. Default: false. 
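`validate_only` on the create request gives a dry run: the server checks the request and returns without creating anything. A request-construction sketch; the commented-out client call assumes the `BusinessGlossaryServiceClient` generated by this patch and the usual long-running-operation pattern:

```python
from google.cloud import dataplex_v1

request = dataplex_v1.CreateGlossaryRequest(
    parent="projects/my-project/locations/us-central1",
    glossary_id="finance",
    glossary=dataplex_v1.Glossary(display_name="Finance"),
    validate_only=True,  # dry run only; flip to False to actually create
)

# client = dataplex_v1.BusinessGlossaryServiceClient()
# glossary = client.create_glossary(request=request).result()
```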
+ """ + + glossary: 'Glossary' = proto.Field( + proto.MESSAGE, + number=1, + message='Glossary', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteGlossaryRequest(proto.Message): + r"""Delete Glossary Request + + Attributes: + name (str): + Required. The name of the Glossary to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + etag (str): + Optional. The etag of the Glossary. + If this is provided, it must match the server's + etag. If the etag is provided and does not match + the server-computed etag, the request must fail + with a ABORTED error code. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetGlossaryRequest(proto.Message): + r"""Get Glossary Request + + Attributes: + name (str): + Required. The name of the Glossary to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossariesRequest(proto.Message): + r"""List Glossaries Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + Glossaries. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. The maximum number of Glossaries to + return. The service may return fewer than this + value. If unspecified, at most 50 Glossaries + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaries`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListGlossaries`` must match the call that + provided the page token. + filter (str): + Optional. Filter expression that filters Glossaries listed + in the response. Filters on proto fields of Glossary are + supported. Examples of using a filter are: + + - ``display_name="my-glossary"`` + - ``categoryCount=1`` + - ``termCount=0`` + order_by (str): + Optional. Order by expression that orders Glossaries listed + in the response. Order by fields are: ``name`` or + ``create_time`` for the result. If not specified, the + ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGlossariesResponse(proto.Message): + r"""List Glossaries Response + + Attributes: + glossaries (MutableSequence[google.cloud.dataplex_v1.types.Glossary]): + Lists the Glossaries in the specified parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. 
+ """ + + @property + def raw_page(self): + return self + + glossaries: MutableSequence['Glossary'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Glossary', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateGlossaryCategoryRequest(proto.Message): + r"""Creates a new GlossaryCategory under the specified Glossary. + + Attributes: + parent (str): + Required. The parent resource where this GlossaryCategory + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``locationId`` refers to a Google Cloud region. + category_id (str): + Required. GlossaryCategory identifier. + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + category_id: str = proto.Field( + proto.STRING, + number=2, + ) + category: 'GlossaryCategory' = proto.Field( + proto.MESSAGE, + number=3, + message='GlossaryCategory', + ) + + +class UpdateGlossaryCategoryRequest(proto.Message): + r"""Update GlossaryCategory Request + + Attributes: + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify the + GlossaryCategory to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + category: 'GlossaryCategory' = proto.Field( + proto.MESSAGE, + number=1, + message='GlossaryCategory', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteGlossaryCategoryRequest(proto.Message): + r"""Delete GlossaryCategory Request + + Attributes: + name (str): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetGlossaryCategoryRequest(proto.Message): + r"""Get GlossaryCategory Request + + Attributes: + name (str): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossaryCategoriesRequest(proto.Message): + r"""List GlossaryCategories Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + Location is the Google Cloud region. + page_size (int): + Optional. The maximum number of + GlossaryCategories to return. The service may + return fewer than this value. If unspecified, at + most 50 GlossaryCategories will be returned. The + maximum value is 1000; values above 1000 will be + coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaryCategories`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters + provided to ``ListGlossaryCategories`` must match the call + that provided the page token. 
+        filter (str):
+            Optional. Filter expression that filters GlossaryCategories
+            listed in the response. Filters are supported on the
+            following fields:
+
+            -  immediate_parent
+
+            Examples of using a filter are:
+
+            -  ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"``
+            -  ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"``
+
+            This will only return the GlossaryCategories that are
+            directly nested under the specified parent.
+        order_by (str):
+            Optional. Order by expression that orders GlossaryCategories
+            listed in the response. Order by fields are: ``name`` or
+            ``create_time`` for the result. If not specified, the
+            ordering is undefined.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListGlossaryCategoriesResponse(proto.Message):
+    r"""List GlossaryCategories Response
+
+    Attributes:
+        categories (MutableSequence[google.cloud.dataplex_v1.types.GlossaryCategory]):
+            Lists the GlossaryCategories in the specified
+            parent.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        unreachable_locations (MutableSequence[str]):
+            Locations that the service couldn't reach.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    categories: MutableSequence['GlossaryCategory'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='GlossaryCategory',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable_locations: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class CreateGlossaryTermRequest(proto.Message):
+    r"""Creates a new GlossaryTerm under the specified Glossary.
+
+    Attributes:
+        parent (str):
+            Required. The parent resource where the GlossaryTerm will be
+            created. Format:
+            projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
+            where ``location_id`` refers to a Google Cloud region.
+        term_id (str):
+            Required. GlossaryTerm identifier.
+        term (google.cloud.dataplex_v1.types.GlossaryTerm):
+            Required. The GlossaryTerm to create.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    term_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    term: 'GlossaryTerm' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='GlossaryTerm',
+    )
+
+
+class UpdateGlossaryTermRequest(proto.Message):
+    r"""Update GlossaryTerm Request
+
+    Attributes:
+        term (google.cloud.dataplex_v1.types.GlossaryTerm):
+            Required. The GlossaryTerm to update. The GlossaryTerm's
+            ``name`` field is used to identify the GlossaryTerm to
+            update. Format:
+            projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id}
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. The list of fields to update.
+ """ + + term: 'GlossaryTerm' = proto.Field( + proto.MESSAGE, + number=1, + message='GlossaryTerm', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteGlossaryTermRequest(proto.Message): + r"""Delete GlossaryTerm Request + + Attributes: + name (str): + Required. The name of the GlossaryTerm to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetGlossaryTermRequest(proto.Message): + r"""Get GlossaryTerm Request + + Attributes: + name (str): + Required. The name of the GlossaryTerm to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossaryTermsRequest(proto.Message): + r"""List GlossaryTerms Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. The maximum number of GlossaryTerms + to return. The service may return fewer than + this value. If unspecified, at most 50 + GlossaryTerms will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaryTerms`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListGlossaryTerms`` must match the call that + provided the page token. + filter (str): + Optional. Filter expression that filters GlossaryTerms + listed in the response. Filters are supported on the + following fields: + + - immediate_parent + + Examples of using a filter are: + ------------------------------- + + ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + ------------------------------------------------------------------------------------------------------- + + ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + + This will only return the GlossaryTerms that are directly + nested under the specified parent. + order_by (str): + Optional. Order by expression that orders GlossaryTerms + listed in the response. Order by fields are: ``name`` or + ``create_time`` for the result. If not specified, the + ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGlossaryTermsResponse(proto.Message): + r"""List GlossaryTerms Response + + Attributes: + terms (MutableSequence[google.cloud.dataplex_v1.types.GlossaryTerm]): + Lists the GlossaryTerms in the specified + parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. 
+ """ + + @property + def raw_page(self): + return self + + terms: MutableSequence['GlossaryTerm'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='GlossaryTerm', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py new file mode 100644 index 000000000000..7d114d02aab3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py @@ -0,0 +1,3079 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'EntryView', + 'TransferStatus', + 'AspectType', + 'EntryGroup', + 'EntryType', + 'Aspect', + 'AspectSource', + 'Entry', + 'EntrySource', + 'CreateEntryGroupRequest', + 'UpdateEntryGroupRequest', + 'DeleteEntryGroupRequest', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'GetEntryGroupRequest', + 'CreateEntryTypeRequest', + 'UpdateEntryTypeRequest', + 'DeleteEntryTypeRequest', + 'ListEntryTypesRequest', + 'ListEntryTypesResponse', + 'GetEntryTypeRequest', + 'CreateAspectTypeRequest', + 'UpdateAspectTypeRequest', + 'DeleteAspectTypeRequest', + 'ListAspectTypesRequest', + 'ListAspectTypesResponse', + 'GetAspectTypeRequest', + 'CreateEntryRequest', + 'UpdateEntryRequest', + 'DeleteEntryRequest', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'GetEntryRequest', + 'LookupEntryRequest', + 'SearchEntriesRequest', + 'SearchEntriesResult', + 'SearchEntriesResponse', + 'ImportItem', + 'CreateMetadataJobRequest', + 'GetMetadataJobRequest', + 'ListMetadataJobsRequest', + 'ListMetadataJobsResponse', + 'CancelMetadataJobRequest', + 'MetadataJob', + 'EntryLink', + 'CreateEntryLinkRequest', + 'DeleteEntryLinkRequest', + 'GetEntryLinkRequest', + }, +) + + +class EntryView(proto.Enum): + r"""View for controlling which parts of an entry are to be + returned. + + Values: + ENTRY_VIEW_UNSPECIFIED (0): + Unspecified EntryView. Defaults to FULL. + BASIC (1): + Returns entry only, without aspects. + FULL (2): + Returns all required aspects as well as the + keys of all non-required aspects. + CUSTOM (3): + Returns aspects matching custom fields in + GetEntryRequest. If the number of aspects + exceeds 100, the first 100 will be returned. + ALL (4): + Returns all aspects. If the number of aspects + exceeds 100, the first 100 will be returned. 
+ """ + ENTRY_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + CUSTOM = 3 + ALL = 4 + + +class TransferStatus(proto.Enum): + r"""Denotes the transfer status of a resource. It is unspecified + for resources created from Dataplex API. + + Values: + TRANSFER_STATUS_UNSPECIFIED (0): + The default value. It is set for resources + that were not subject for migration from Data + Catalog service. + TRANSFER_STATUS_MIGRATED (1): + Indicates that a resource was migrated from + Data Catalog service but it hasn't been + transferred yet. In particular the resource + cannot be updated from Dataplex API. + TRANSFER_STATUS_TRANSFERRED (2): + Indicates that a resource was transferred + from Data Catalog service. The resource can only + be updated from Dataplex API. + """ + TRANSFER_STATUS_UNSPECIFIED = 0 + TRANSFER_STATUS_MIGRATED = 1 + TRANSFER_STATUS_TRANSFERRED = 2 + + +class AspectType(proto.Message): + r"""AspectType is a template for creating Aspects, and represents + the JSON-schema for a given Entry, for example, BigQuery Table + Schema. + + Attributes: + name (str): + Output only. The relative resource name of the AspectType, + of the form: + projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. + uid (str): + Output only. System generated globally unique + ID for the AspectType. If you delete and + recreate the AspectType with the same name, then + this ID will be different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the AspectType was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the AspectType was + last updated. + description (str): + Optional. Description of the AspectType. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + AspectType. + etag (str): + The service computes this checksum. The + client may send it on update and delete requests + to ensure it has an up-to-date value before + proceeding. + authorization (google.cloud.dataplex_v1.types.AspectType.Authorization): + Immutable. Defines the Authorization for this + type. + metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Required. MetadataTemplate of the aspect. + transfer_status (google.cloud.dataplex_v1.types.TransferStatus): + Output only. Denotes the transfer status of + the Aspect Type. It is unspecified for Aspect + Types created from Dataplex API. + """ + + class Authorization(proto.Message): + r"""Authorization for an AspectType. + + Attributes: + alternate_use_permission (str): + Immutable. The IAM permission grantable on + the EntryGroup to allow access to instantiate + Aspects of Dataplex Universal Catalog owned + AspectTypes, only settable for Dataplex + Universal Catalog owned Types. + """ + + alternate_use_permission: str = proto.Field( + proto.STRING, + number=1, + ) + + class MetadataTemplate(proto.Message): + r"""MetadataTemplate definition for an AspectType. + + Attributes: + index (int): + Optional. Index is used to encode Template + messages. The value of index can range between 1 + and 2,147,483,647. Index must be unique within + all fields in a Template. (Nested Templates can + reuse indexes). Once a Template is defined, the + index cannot be changed, because it identifies + the field in the actual storage format. Index is + a mandatory field, but it is optional for top + level fields, and map/array "values" + definitions. + name (str): + Required. The name of the field. 
+ type_ (str): + Required. The datatype of this field. The following values + are supported: + + Primitive types: + + - string + - int + - bool + - double + - datetime. Must be of the format RFC3339 UTC "Zulu" + (Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z"). + + Complex types: + + - enum + - array + - map + - record + record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]): + Optional. Field definition. You must specify + it if the type is record. It defines the nested + fields. + enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]): + Optional. The list of values for an enum + type. You must define it if the type is enum. + map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Optional. If the type is map, set map_items. map_items can + refer to a primitive field or a complex (record only) field. + To specify a primitive field, you only need to set name and + type in the nested MetadataTemplate. The recommended value + for the name field is item, as this isn't used in the actual + payload. + array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): + Optional. If the type is array, set array_items. array_items + can refer to a primitive field or a complex (record only) + field. To specify a primitive field, you only need to set + name and type in the nested MetadataTemplate. The + recommended value for the name field is item, as this isn't + used in the actual payload. + type_id (str): + Optional. You can use type id if this + definition of the field needs to be reused + later. The type id must be unique across the + entire template. You can only specify it if the + field type is record. + type_ref (str): + Optional. A reference to another field + definition (not an inline definition). The value + must be equal to the value of an id field + defined elsewhere in the MetadataTemplate. Only + fields with record type can refer to other + fields. + constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): + Optional. Specifies the constraints on this + field. + annotations (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Annotations): + Optional. Specifies annotations on this + field. + """ + + class EnumValue(proto.Message): + r"""Definition of Enumvalue, to be used for enum fields. + + Attributes: + index (int): + Required. Index for the enum value. It can't + be modified. + name (str): + Required. Name of the enumvalue. This is the + actual value that the aspect can contain. + deprecated (str): + Optional. You can set this message if you + need to deprecate an enum value. + """ + + index: int = proto.Field( + proto.INT32, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + deprecated: str = proto.Field( + proto.STRING, + number=3, + ) + + class Constraints(proto.Message): + r"""Definition of the constraints of a field. + + Attributes: + required (bool): + Optional. Marks this field as optional or + required. + """ + + required: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class Annotations(proto.Message): + r"""Definition of the annotations of a field. + + Attributes: + deprecated (str): + Optional. Marks a field as deprecated. You + can include a deprecation message. + display_name (str): + Optional. Display name for a field. + description (str): + Optional. Description for a field. + display_order (int): + Optional. Display order for a field. 
You can + use this to reorder where a field is rendered. + string_type (str): + Optional. You can use String Type annotations to specify + special meaning to string fields. The following values are + supported: + + - richText: The field must be interpreted as a rich text + field. + - url: A fully qualified URL link. + - resource: A service qualified resource reference. + string_values (MutableSequence[str]): + Optional. Suggested hints for string fields. + You can use them to suggest values to users + through console. + """ + + deprecated: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + display_order: int = proto.Field( + proto.INT32, + number=4, + ) + string_type: str = proto.Field( + proto.STRING, + number=6, + ) + string_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + + index: int = proto.Field( + proto.INT32, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=5, + ) + record_fields: MutableSequence['AspectType.MetadataTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AspectType.MetadataTemplate', + ) + enum_values: MutableSequence['AspectType.MetadataTemplate.EnumValue'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='AspectType.MetadataTemplate.EnumValue', + ) + map_items: 'AspectType.MetadataTemplate' = proto.Field( + proto.MESSAGE, + number=10, + message='AspectType.MetadataTemplate', + ) + array_items: 'AspectType.MetadataTemplate' = proto.Field( + proto.MESSAGE, + number=11, + message='AspectType.MetadataTemplate', + ) + type_id: str = proto.Field( + proto.STRING, + number=12, + ) + type_ref: str = proto.Field( + proto.STRING, + number=13, + ) + constraints: 'AspectType.MetadataTemplate.Constraints' = proto.Field( + proto.MESSAGE, + number=50, + message='AspectType.MetadataTemplate.Constraints', + ) + annotations: 'AspectType.MetadataTemplate.Annotations' = proto.Field( + proto.MESSAGE, + number=51, + message='AspectType.MetadataTemplate.Annotations', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + authorization: Authorization = proto.Field( + proto.MESSAGE, + number=52, + message=Authorization, + ) + metadata_template: MetadataTemplate = proto.Field( + proto.MESSAGE, + number=53, + message=MetadataTemplate, + ) + transfer_status: 'TransferStatus' = proto.Field( + proto.ENUM, + number=202, + enum='TransferStatus', + ) + + +class EntryGroup(proto.Message): + r"""An Entry Group represents a logical grouping of one or more + Entries. + + Attributes: + name (str): + Output only. The relative resource name of the EntryGroup, + in the format + projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}. + uid (str): + Output only. 
System-generated globally unique + ID for the EntryGroup. If you delete and + recreate the EntryGroup with the same name, this + ID will be different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryGroup was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryGroup was + last updated. + description (str): + Optional. Description of the EntryGroup. + display_name (str): + Optional. User-friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + EntryGroup. + etag (str): + This checksum is computed by the service, and + might be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + transfer_status (google.cloud.dataplex_v1.types.TransferStatus): + Output only. Denotes the transfer status of + the Entry Group. It is unspecified for Entry + Groups created from the Dataplex API. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + transfer_status: 'TransferStatus' = proto.Field( + proto.ENUM, + number=202, + enum='TransferStatus', + ) + + +class EntryType(proto.Message): + r"""Entry Type is a template for creating Entries. + + Attributes: + name (str): + Output only. The relative resource name of the EntryType, of + the form: + projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}. + uid (str): + Output only. System-generated globally unique + ID for the EntryType. This ID will be different + if the EntryType is deleted and re-created with + the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryType was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the EntryType was + last updated. + description (str): + Optional. Description of the EntryType. + display_name (str): + Optional. User-friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + EntryType. + etag (str): + Optional. This checksum is computed by the + service, and might be sent on update and delete + requests to ensure the client has an up-to-date + value before proceeding. + type_aliases (MutableSequence[str]): + Optional. Indicates the classes this Entry + Type belongs to, for example, TABLE, DATABASE, + MODEL. + platform (str): + Optional. The platform that Entries of this + type belong to. + system (str): + Optional. The system that Entries of this + type belong to. Examples include CloudSQL + and MariaDB. + required_aspects (MutableSequence[google.cloud.dataplex_v1.types.EntryType.AspectInfo]): + AspectInfo for the entry type. + authorization (google.cloud.dataplex_v1.types.EntryType.Authorization): + Immutable. Authorization defined for this + type. + """ + + class AspectInfo(proto.Message): + r""" + + Attributes: + type_ (str): + Required aspect type for the entry type.
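+ # [Editorial sketch, not generated code] A minimal example of how the
+ # MetadataTemplate documented above might be assembled and submitted via
+ # CreateAspectTypeRequest (defined later in this module). The project,
+ # location, and type IDs are placeholders, and CreateAspectType is
+ # assumed to return a long-running operation, like other Dataplex
+ # create methods.
+ def _example_create_aspect_type():
+     from google.cloud import dataplex_v1
+
+     # A record template with one required string field and one enum field.
+     template = dataplex_v1.AspectType.MetadataTemplate(
+         name="review",
+         type_="record",
+         record_fields=[
+             dataplex_v1.AspectType.MetadataTemplate(
+                 index=1,
+                 name="owner",
+                 type_="string",
+                 constraints=dataplex_v1.AspectType.MetadataTemplate.Constraints(required=True),
+             ),
+             dataplex_v1.AspectType.MetadataTemplate(
+                 index=2,
+                 name="status",
+                 type_="enum",
+                 enum_values=[
+                     dataplex_v1.AspectType.MetadataTemplate.EnumValue(index=1, name="APPROVED"),
+                     dataplex_v1.AspectType.MetadataTemplate.EnumValue(index=2, name="REJECTED"),
+                 ],
+             ),
+         ],
+     )
+     client = dataplex_v1.CatalogServiceClient()
+     operation = client.create_aspect_type(request=dataplex_v1.CreateAspectTypeRequest(
+         parent="projects/my-project/locations/us-central1",
+         aspect_type_id="review",
+         aspect_type=dataplex_v1.AspectType(metadata_template=template),
+     ))
+     return operation.result()  # blocks until the AspectType is created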
+ """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + + class Authorization(proto.Message): + r"""Authorization for an Entry Type. + + Attributes: + alternate_use_permission (str): + Immutable. The IAM permission grantable on + the Entry Group to allow access to instantiate + Entries of Dataplex Universal Catalog owned + Entry Types, only settable for Dataplex + Universal Catalog owned Types. + """ + + alternate_use_permission: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + type_aliases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + platform: str = proto.Field( + proto.STRING, + number=10, + ) + system: str = proto.Field( + proto.STRING, + number=11, + ) + required_aspects: MutableSequence[AspectInfo] = proto.RepeatedField( + proto.MESSAGE, + number=50, + message=AspectInfo, + ) + authorization: Authorization = proto.Field( + proto.MESSAGE, + number=51, + message=Authorization, + ) + + +class Aspect(proto.Message): + r"""An aspect is a single piece of metadata describing an entry. + + Attributes: + aspect_type (str): + Output only. The resource name of the type + used to create this Aspect. + path (str): + Output only. The path in the entry under + which the aspect is attached. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Aspect was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Aspect was + last updated. + data (google.protobuf.struct_pb2.Struct): + Required. The content of the aspect, + according to its aspect type schema. The maximum + size of the field is 120KB (encoded as UTF-8). + aspect_source (google.cloud.dataplex_v1.types.AspectSource): + Optional. Information related to the source + system of the aspect. + """ + + aspect_type: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=8, + message=struct_pb2.Struct, + ) + aspect_source: 'AspectSource' = proto.Field( + proto.MESSAGE, + number=9, + message='AspectSource', + ) + + +class AspectSource(proto.Message): + r"""Information related to the source system of the aspect. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the aspect was created in the source + system. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time the aspect was last updated in the + source system. + data_version (str): + The version of the data format used to + produce this data. 
This field is used to + indicate when the underlying data format + changes (e.g., schema modifications, changes to + the source URL format definition, etc.). + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + data_version: str = proto.Field( + proto.STRING, + number=12, + ) + + +class Entry(proto.Message): + r"""An entry is a representation of a data resource that can be + described by various metadata. + + Attributes: + name (str): + Identifier. The relative resource name of the entry, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. + entry_type (str): + Required. Immutable. The relative resource name of the entry + type that was used to create this entry, in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entry was + created in Dataplex Universal Catalog. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entry was last + updated in Dataplex Universal Catalog. + aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): + Optional. The aspects that are attached to the entry. + Depending on how the aspect is attached to the entry, the + format of the aspect key can be one of the following: + + - If the aspect is attached directly to the entry: + ``{project_id_or_number}.{location_id}.{aspect_type_id}`` + - If the aspect is attached to an entry's path: + ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` + parent_entry (str): + Optional. Immutable. The resource name of the parent entry, + in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. + fully_qualified_name (str): + Optional. A name for the entry that can be referenced by an + external system. For more information, see `Fully qualified + names `__. + The maximum size of the field is 4000 characters. + entry_source (google.cloud.dataplex_v1.types.EntrySource): + Optional. Information related to the source + system of the data resource that is represented + by the entry. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_type: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + aspects: MutableMapping[str, 'Aspect'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=9, + message='Aspect', + ) + parent_entry: str = proto.Field( + proto.STRING, + number=10, + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=12, + ) + entry_source: 'EntrySource' = proto.Field( + proto.MESSAGE, + number=15, + message='EntrySource', + ) + + +class EntrySource(proto.Message): + r"""Information related to the source system of the data resource + that is represented by the entry. + + Attributes: + resource (str): + The name of the resource in the source + system. Maximum length is 4,000 characters. + system (str): + The name of the source system. + Maximum length is 64 characters.
+ platform (str): + The platform containing the source system. + Maximum length is 64 characters. + display_name (str): + A user-friendly display name. + Maximum length is 500 characters. + description (str): + A description of the data resource. + Maximum length is 2,000 characters. + labels (MutableMapping[str, str]): + User-defined labels. + The maximum size of keys and values is 128 + characters each. + ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): + Immutable. The entries representing the + ancestors of the data resource in the source + system. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was created in the + source system. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was last updated in the source + system. If the entry exists in the system and its + ``EntrySource`` has ``update_time`` populated, further + updates to the ``EntrySource`` of the entry must provide + incremental updates to its ``update_time``. + location (str): + Output only. Location of the resource in the + source system. You can search the entry by this + location. By default, this should match the + location of the entry group containing this + entry. A different value allows capturing the + source location for data external to Google + Cloud. + """ + + class Ancestor(proto.Message): + r"""Information about individual items in the hierarchy that is + associated with the data resource. + + Attributes: + name (str): + Optional. The name of the ancestor resource. + type_ (str): + Optional. The type of the ancestor resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + + resource: str = proto.Field( + proto.STRING, + number=1, + ) + system: str = proto.Field( + proto.STRING, + number=2, + ) + platform: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + ancestors: MutableSequence[Ancestor] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=Ancestor, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + location: str = proto.Field( + proto.STRING, + number=12, + ) + + +class CreateEntryGroupRequest(proto.Message): + r"""Create EntryGroup Request. + + Attributes: + parent (str): + Required. The resource name of the entryGroup, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + entry_group_id (str): + Required. EntryGroup identifier. + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. 
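+ # [Editorial sketch, not generated code] One plausible way to use the
+ # CreateEntryGroupRequest documented above: validate the payload first,
+ # then perform the actual create. The IDs are placeholders, and
+ # CreateEntryGroup is assumed to return a long-running operation.
+ def _example_create_entry_group():
+     from google.cloud import dataplex_v1
+
+     client = dataplex_v1.CatalogServiceClient()
+     request = dataplex_v1.CreateEntryGroupRequest(
+         parent="projects/my-project/locations/us-central1",
+         entry_group_id="my-entry-group",
+         entry_group=dataplex_v1.EntryGroup(description="Curated tables"),
+         validate_only=True,  # dry run: the service only checks the request
+     )
+     client.create_entry_group(request=request).result()  # raises if invalid
+     request.validate_only = False  # now perform the mutation for real
+     return client.create_entry_group(request=request).result()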
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=3, + message='EntryGroup', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEntryGroupRequest(proto.Message): + r"""Update EntryGroup Request. + + Attributes: + entry_group (google.cloud.dataplex_v1.types.EntryGroup): + Required. EntryGroup Resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. The service validates the request, + without performing any mutations. The default is + false. + """ + + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEntryGroupRequest(proto.Message): + r"""Delete EntryGroup Request. + + Attributes: + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteEntryGroupRequest method returns an + ABORTED error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntryGroupsRequest(proto.Message): + r"""List entryGroups request. + + Attributes: + parent (str): + Required. The resource name of the entryGroup location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of EntryGroups to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 EntryGroups. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEntryGroups`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters you + provide to ``ListEntryGroups`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEntryGroupsResponse(proto.Message): + r"""List entry groups response. + + Attributes: + entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): + Entry groups under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. 
+ """ + + @property + def raw_page(self): + return self + + entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEntryGroupRequest(proto.Message): + r"""Get EntryGroup request. + + Attributes: + name (str): + Required. The resource name of the EntryGroup: + ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEntryTypeRequest(proto.Message): + r"""Create EntryType Request. + + Attributes: + parent (str): + Required. The resource name of the EntryType, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + entry_type_id (str): + Required. EntryType identifier. + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_type_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_type: 'EntryType' = proto.Field( + proto.MESSAGE, + number=3, + message='EntryType', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEntryTypeRequest(proto.Message): + r"""Update EntryType Request. + + Attributes: + entry_type (google.cloud.dataplex_v1.types.EntryType): + Required. EntryType Resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + entry_type: 'EntryType' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryType', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEntryTypeRequest(proto.Message): + r"""Delete EntryType Request. + + Attributes: + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteEntryTypeRequest method returns an ABORTED + error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntryTypesRequest(proto.Message): + r"""List EntryTypes request + + Attributes: + parent (str): + Required. The resource name of the EntryType location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of EntryTypes to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 EntryTypes. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEntryTypes`` call. Provide this to retrieve the + subsequent page. 
When paginating, all other parameters you + provided to ``ListEntryTypes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. + order_by (str): + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEntryTypesResponse(proto.Message): + r"""List EntryTypes response. + + Attributes: + entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]): + EntryTypes under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + entry_types: MutableSequence['EntryType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEntryTypeRequest(proto.Message): + r"""Get EntryType request. + + Attributes: + name (str): + Required. The resource name of the EntryType: + ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAspectTypeRequest(proto.Message): + r"""Create AspectType Request. + + Attributes: + parent (str): + Required. The resource name of the AspectType, of the form: + projects/{project_number}/locations/{location_id} where + ``location_id`` refers to a Google Cloud region. + aspect_type_id (str): + Required. AspectType identifier. + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + aspect_type_id: str = proto.Field( + proto.STRING, + number=2, + ) + aspect_type: 'AspectType' = proto.Field( + proto.MESSAGE, + number=3, + message='AspectType', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateAspectTypeRequest(proto.Message): + r"""Update AspectType Request + + Attributes: + aspect_type (google.cloud.dataplex_v1.types.AspectType): + Required. AspectType Resource + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
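+ # [Editorial sketch, not generated code] Updating an AspectType with an
+ # explicit field mask, per the UpdateAspectTypeRequest documented above;
+ # only the paths named in update_mask are modified. The resource name is
+ # a placeholder, and UpdateAspectType is assumed to return a
+ # long-running operation.
+ def _example_update_aspect_type():
+     from google.cloud import dataplex_v1
+     from google.protobuf import field_mask_pb2
+
+     client = dataplex_v1.CatalogServiceClient()
+     aspect_type = dataplex_v1.AspectType(
+         name="projects/my-project/locations/us-central1/aspectTypes/review",
+         description="Review metadata for curated tables",
+     )
+     operation = client.update_aspect_type(request=dataplex_v1.UpdateAspectTypeRequest(
+         aspect_type=aspect_type,
+         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+     ))
+     return operation.result()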
+ """ + + aspect_type: 'AspectType' = proto.Field( + proto.MESSAGE, + number=1, + message='AspectType', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteAspectTypeRequest(proto.Message): + r"""Delete AspectType Request. + + Attributes: + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteAspectTypeRequest method returns an + ABORTED error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAspectTypesRequest(proto.Message): + r"""List AspectTypes request. + + Attributes: + parent (str): + Required. The resource name of the AspectType location, of + the form: + ``projects/{project_number}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of AspectTypes to + return. The service may return fewer than this + value. If unspecified, the service returns at + most 10 AspectTypes. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListAspectTypes`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters you + provide to ``ListAspectTypes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. + order_by (str): + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAspectTypesResponse(proto.Message): + r"""List AspectTypes response. + + Attributes: + aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]): + AspectTypes under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + aspect_types: MutableSequence['AspectType'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AspectType', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetAspectTypeRequest(proto.Message): + r"""Get AspectType request. + + Attributes: + name (str): + Required. The resource name of the AspectType: + ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEntryRequest(proto.Message): + r"""Create Entry request. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + entry_id (str): + Required. Entry identifier. It has to be unique within an + Entry Group. + + Entries corresponding to Google Cloud resources use an Entry + ID format based on `full resource + names `__. + The format is a full resource name of the resource without + the prefix double slashes in the API service name part of + the full resource name. This allows retrieval of entries + using their associated resource name. + + For example, if the full resource name of a resource is + ``//library.googleapis.com/shelves/shelf1/books/book2``, + then the suggested entry_id is + ``library.googleapis.com/shelves/shelf1/books/book2``. + + It is also suggested to follow the same convention for + entries corresponding to resources from providers or systems + other than Google Cloud. + + The maximum size of the field is 4000 characters. + entry (google.cloud.dataplex_v1.types.Entry): + Required. Entry resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=3, + message='Entry', + ) + + +class UpdateEntryRequest(proto.Message): + r"""Update Entry request. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Required. Entry resource. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. To update Aspects, the + update_mask must contain the value "aspects". + + If the update_mask is empty, the service will update all + modifiable fields present in the request. + allow_missing (bool): + Optional. If set to true and the entry + doesn't exist, the service will create it. + delete_missing_aspects (bool): + Optional. If set to true and the aspect_keys specify aspect + ranges, the service deletes any existing aspects from that + range that weren't provided in the request. + aspect_keys (MutableSequence[str]): + Optional. The map keys of the Aspects which the service + should modify. It supports the following syntaxes: + + - ```` - matches an aspect of the + given type and empty path. + - ``@path`` - matches an aspect of + the given type and specified path. For example, to attach + an aspect to a field that is specified by the ``schema`` + aspect, the path should have the format + ``Schema.``. + - ``@*`` - matches aspects of the + given type for all paths. + - ``*@path`` - matches aspects of all types on the given + path. + + The service will not remove existing aspects matching the + syntax unless ``delete_missing_aspects`` is set to true. + + If this field is left empty, the service treats it as + specifying exactly those Aspects present in the request. + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + delete_missing_aspects: bool = proto.Field( + proto.BOOL, + number=4, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class DeleteEntryRequest(proto.Message): + r"""Delete Entry request. + + Attributes: + name (str): + Required. 
The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEntriesRequest(proto.Message): + r"""List Entries request. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. + page_size (int): + Optional. Number of items to return per page. If there are + remaining results, the service returns a next_page_token. If + unspecified, the service returns at most 10 Entries. The + maximum value is 100; values above 100 will be coerced to + 100. + page_token (str): + Optional. Page token received from a previous + ``ListEntries`` call. Provide this to retrieve the + subsequent page. + filter (str): + Optional. A filter on the entries to return. Filters are + case-sensitive. You can filter the request by the following + fields: + + - entry_type + - entry_source.display_name + + The comparison operators are =, !=, <, >, <=, >=. The + service compares strings according to lexical order. + + You can use the logical operators AND, OR, NOT in the + filter. + + You can use Wildcard "\*", but for entry_type you need to + provide the full project id or number. + + Example filter expressions: + + - "entry_source.display_name=AnExampleDisplayName" + - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" + - "entry_type=projects/example-project/locations/us/entryTypes/a\* + OR entry_type=projects/another-project/locations/\*" + - "NOT entry_source.display_name=AnotherExampleDisplayName". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListEntriesResponse(proto.Message): + r"""List Entries response. + + Attributes: + entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]): + The list of entries under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + entries: MutableSequence['Entry'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entry', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEntryRequest(proto.Message): + r"""Get Entry request. + + Attributes: + name (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + view (google.cloud.dataplex_v1.types.EntryView): + Optional. View to control which parts of an + entry the service should return. + aspect_types (MutableSequence[str]): + Optional. Limits the aspects returned to the + provided aspect types. It only works for CUSTOM + view. + paths (MutableSequence[str]): + Optional. Limits the aspects returned to + those associated with the provided paths within + the Entry. It only works for CUSTOM view. 
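+ # [Editorial sketch, not generated code] Fetching an entry with the
+ # CUSTOM view so that only the requested aspect types and paths are
+ # returned, as described above. The resource names are placeholders, and
+ # the aspect_types value format is assumed from the aspect-key syntax
+ # documented earlier in this module.
+ def _example_get_entry_custom_view():
+     from google.cloud import dataplex_v1
+
+     client = dataplex_v1.CatalogServiceClient()
+     return client.get_entry(request=dataplex_v1.GetEntryRequest(
+         name=("projects/my-project/locations/us-central1/"
+               "entryGroups/my-entry-group/entries/my-entry"),
+         view=dataplex_v1.EntryView.CUSTOM,
+         aspect_types=["my-project.us-central1.review"],  # assumed key format
+         paths=[""],  # the empty path targets aspects on the entry itself
+     ))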
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: 'EntryView' = proto.Field( + proto.ENUM, + number=2, + enum='EntryView', + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class LookupEntryRequest(proto.Message): + r"""Lookup Entry request using permissions in the source system. + + Attributes: + name (str): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/{location}``. + view (google.cloud.dataplex_v1.types.EntryView): + Optional. View to control which parts of an + entry the service should return. + aspect_types (MutableSequence[str]): + Optional. Limits the aspects returned to the + provided aspect types. It only works for CUSTOM + view. + paths (MutableSequence[str]): + Optional. Limits the aspects returned to + those associated with the provided paths within + the Entry. It only works for CUSTOM view. + entry (str): + Required. The resource name of the Entry: + ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: 'EntryView' = proto.Field( + proto.ENUM, + number=2, + enum='EntryView', + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + entry: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchEntriesRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The project to which the request should be + attributed in the following form: + ``projects/{project}/locations/global``. + query (str): + Required. The query against which entries in scope should be + matched. The query syntax is defined in `Search syntax for + Dataplex Universal + Catalog `__. + page_size (int): + Optional. Number of results in the search page. If <=0, then + defaults to 10. Max limit for page_size is 1000. Throws an + invalid argument for page_size > 1000. + page_token (str): + Optional. Page token received from a previous + ``SearchEntries`` call. Provide this to retrieve the + subsequent page. + order_by (str): + Optional. Specifies the ordering of results. Supported + values are: + + - ``relevance`` + - ``last_modified_timestamp`` + - ``last_modified_timestamp asc`` + scope (str): + Optional. The scope under which the search should be + operating. It must either be ``organizations/`` or + ``projects/``. If it is unspecified, it + defaults to the organization where the project provided in + ``name`` is located. + semantic_search (bool): + Optional. Specifies whether the search should + understand the meaning and intent behind the + query, rather than just matching keywords. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + scope: str = proto.Field( + proto.STRING, + number=7, + ) + semantic_search: bool = proto.Field( + proto.BOOL, + number=11, + ) + + +class SearchEntriesResult(proto.Message): + r"""A single result of a SearchEntries request. + + Attributes: + linked_resource (str): + Linked resource name. 
+ dataplex_entry (google.cloud.dataplex_v1.types.Entry): + + snippets (google.cloud.dataplex_v1.types.SearchEntriesResult.Snippets): + Snippets. + """ + + class Snippets(proto.Message): + r"""Snippets for the entry, containing HTML-style highlighting for + matched tokens, to be used in the UI. + + Attributes: + dataplex_entry (google.cloud.dataplex_v1.types.Entry): + Entry + """ + + dataplex_entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + + linked_resource: str = proto.Field( + proto.STRING, + number=8, + ) + dataplex_entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=9, + message='Entry', + ) + snippets: Snippets = proto.Field( + proto.MESSAGE, + number=12, + message=Snippets, + ) + + +class SearchEntriesResponse(proto.Message): + r""" + + Attributes: + results (MutableSequence[google.cloud.dataplex_v1.types.SearchEntriesResult]): + The results matching the search query. + total_size (int): + The estimated total number of matching + entries. This number isn't guaranteed to be + accurate. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that the service couldn't reach. + Search results don't include data from these + locations. + """ + + @property + def raw_page(self): + return self + + results: MutableSequence['SearchEntriesResult'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SearchEntriesResult', + ) + total_size: int = proto.Field( + proto.INT32, + number=2, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class ImportItem(proto.Message): + r"""An object that describes the values that you want to set for an + entry and its attached aspects when you import metadata. Used when + you run a metadata import job. See + [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. + + You provide a collection of import items in a metadata import file. + For more information about how to create a metadata import file, see + `Metadata import + file `__. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Information about an entry and its attached + aspects. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Information about the entry link. Provide exactly + one of entry or entry_link. When providing entry_link, + don't provide update_mask or aspect_keys. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update, in paths that are relative to the + ``Entry`` resource. Separate each field with a comma. + + In ``FULL`` entry sync mode, Dataplex Universal Catalog + includes the paths of all of the fields for an entry that + can be modified, including aspects. This means that Dataplex + Universal Catalog replaces the existing entry with the entry + in the metadata import file. All modifiable fields are + updated, regardless of the fields that are listed in the + update mask, and regardless of whether a field is present in + the ``entry`` object. + + The ``update_mask`` field is ignored when an entry is + created or re-created. + + In an aspect-only metadata job (when entry sync mode is + ``NONE``), set this value to ``aspects``.
+ + Dataplex Universal Catalog also determines which entries and + aspects to modify by comparing the values and timestamps + that you provide in the metadata import file with the values + and timestamps that exist in your project. For more + information, see `Comparison + logic `__. + aspect_keys (MutableSequence[str]): + The aspects to modify. Supports the following syntaxes: + + - ``{aspect_type_reference}``: matches aspects that belong + to the specified aspect type and are attached directly to + the entry. + - ``{aspect_type_reference}@{path}``: matches aspects that + belong to the specified aspect type and path. + - ``{aspect_type_reference}@*`` : matches aspects of the + given type for all paths. + - ``*@path`` : matches aspects of all types on the given + path. + + Replace ``{aspect_type_reference}`` with a reference to the + aspect type, in the format + ``{project_id_or_number}.{location_id}.{aspect_type_id}``. + + In ``FULL`` entry sync mode, if you leave this field empty, + it is treated as specifying exactly those aspects that are + present within the specified entry. Dataplex Universal + Catalog implicitly adds the keys for all of the required + aspects of an entry. + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + entry_link: 'EntryLink' = proto.Field( + proto.MESSAGE, + number=4, + message='EntryLink', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateMetadataJobRequest(proto.Message): + r"""Create metadata job request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique ID + is generated with the prefix ``metadata-job-``. + validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + metadata_job: 'MetadataJob' = proto.Field( + proto.MESSAGE, + number=2, + message='MetadataJob', + ) + metadata_job_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetMetadataJobRequest(proto.Message): + r"""Get metadata job request. + + Attributes: + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMetadataJobsRequest(proto.Message): + r"""List metadata jobs request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + page_size (int): + Optional. The maximum number of metadata jobs + to return. The service might return fewer jobs + than this value. If unspecified, at most 10 jobs + are returned. The maximum value is 1,000. + page_token (str): + Optional. The page token received from a previous + ``ListMetadataJobs`` call. Provide this token to retrieve + the subsequent page of results. 
When paginating, all other + parameters that are provided to the ``ListMetadataJobs`` + request must match the call that provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - ``labels.key1 = "value1"`` + - ``labels:key1`` + - ``name = "value"`` + + You can combine filters with ``AND``, ``OR``, and ``NOT`` + operators. + order_by (str): + Optional. The field to sort the results by, either ``name`` + or ``create_time``. If not specified, the ordering is + undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMetadataJobsResponse(proto.Message): + r"""List metadata jobs response. + + Attributes: + metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): + Metadata jobs under the specified parent + location. + next_page_token (str): + A token to retrieve the next page of results. + If there are no more results in the list, the + value is empty. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + metadata_jobs: MutableSequence['MetadataJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MetadataJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CancelMetadataJobRequest(proto.Message): + r"""Cancel metadata job request. + + Attributes: + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataJob(proto.Message): + r"""A metadata job resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The name of the resource that the + configuration is applied to, in the format + ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + uid (str): + Output only. A system-generated, globally + unique ID for the metadata job. If the metadata + job is deleted and then re-created with the same + name, this ID is different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels. + type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): + Required. Metadata job type. + import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): + Import job specification. + + This field is a member of `oneof`_ ``spec``. + export_spec (google.cloud.dataplex_v1.types.MetadataJob.ExportJobSpec): + Export job specification. + + This field is a member of `oneof`_ ``spec``. 
+ import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): + Output only. Import job result. + + This field is a member of `oneof`_ ``result``. + export_result (google.cloud.dataplex_v1.types.MetadataJob.ExportJobResult): + Output only. Export job result. + + This field is a member of `oneof`_ ``result``. + status (google.cloud.dataplex_v1.types.MetadataJob.Status): + Output only. Metadata job status. + """ + class Type(proto.Enum): + r"""Metadata job type. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + IMPORT (1): + Import job. + EXPORT (2): + Export job. + """ + TYPE_UNSPECIFIED = 0 + IMPORT = 1 + EXPORT = 2 + + class ImportJobResult(proto.Message): + r"""Results from a metadata import job. + + Attributes: + deleted_entries (int): + Output only. The total number of entries that + were deleted. + updated_entries (int): + Output only. The total number of entries that + were updated. + created_entries (int): + Output only. The total number of entries that + were created. + unchanged_entries (int): + Output only. The total number of entries that + were unchanged. + recreated_entries (int): + Output only. The total number of entries that + were recreated. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + deleted_entry_links (int): + Output only. The total number of entry links + that were successfully deleted. + created_entry_links (int): + Output only. The total number of entry links + that were successfully created. + unchanged_entry_links (int): + Output only. The total number of entry links + that were left unchanged. + """ + + deleted_entries: int = proto.Field( + proto.INT64, + number=1, + ) + updated_entries: int = proto.Field( + proto.INT64, + number=2, + ) + created_entries: int = proto.Field( + proto.INT64, + number=3, + ) + unchanged_entries: int = proto.Field( + proto.INT64, + number=4, + ) + recreated_entries: int = proto.Field( + proto.INT64, + number=6, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deleted_entry_links: int = proto.Field( + proto.INT64, + number=7, + ) + created_entry_links: int = proto.Field( + proto.INT64, + number=8, + ) + unchanged_entry_links: int = proto.Field( + proto.INT64, + number=9, + ) + + class ExportJobResult(proto.Message): + r"""Summary results from a metadata export job. The results are a + snapshot of the metadata at the time when the job was created. + The exported entries are saved to a Cloud Storage bucket. + + Attributes: + exported_entries (int): + Output only. The number of entries that were + exported. + error_message (str): + Output only. The error message if the + metadata export job failed. + """ + + exported_entries: int = proto.Field( + proto.INT64, + number=1, + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + class ImportJobSpec(proto.Message): + r"""Job specification for a metadata import job. + + You can run the following kinds of metadata import jobs: + + - Full sync of entries with incremental import of their aspects. + Supported for custom entries. + - Incremental import of aspects only. Supported for aspects that + belong to custom entries and system entries. For custom entries, + you can modify both optional aspects and required aspects. For + system entries, you can modify optional aspects. + + Attributes: + source_storage_uri (str): + Optional. 
The URI of a Cloud Storage bucket or folder + (beginning with ``gs://`` and ending with ``/``) that + contains the metadata import files for this job. + + A metadata import file defines the values to set for each of + the entries and aspects in a metadata import job. For more + information about how to create a metadata import file and + the file requirements, see `Metadata import + file `__. + + You can provide multiple metadata import files in the same + metadata job. The bucket or folder must contain at least one + metadata import file, in JSON Lines format (either ``.json`` + or ``.jsonl`` file extension). + + In ``FULL`` entry sync mode, don't save the metadata import + file in a folder named ``SOURCE_STORAGE_URI/deletions/``. + + **Caution**: If the metadata import file contains no data, + all entries and aspects that belong to the job's scope are + deleted. + source_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the process that + created the metadata import files began. + scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): + Required. A boundary on the scope of impact + that the metadata import job can have. + entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for entries. + aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for aspects. + log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): + Optional. The level of logs to write to Cloud Logging for + this job. + + Debug-level logs provide highly-detailed information for + troubleshooting, but their increased verbosity could incur + `additional + costs `__ that + might not be merited for all jobs. + + If unspecified, defaults to ``INFO``. + """ + class SyncMode(proto.Enum): + r"""Specifies how the entries and aspects in a metadata import job are + updated. For more information, see `Sync + mode `__. + + Values: + SYNC_MODE_UNSPECIFIED (0): + Sync mode unspecified. + FULL (1): + All resources in the job's scope are + modified. If a resource exists in Dataplex + Universal Catalog but isn't included in the + metadata import file, the resource is deleted + when you run the metadata job. Use this mode to + perform a full sync of the set of entries in the + job scope. + + This sync mode is supported for entries. + INCREMENTAL (2): + Only the resources that are explicitly + included in the metadata import file are + modified. Use this mode to modify a subset of + resources while leaving unreferenced resources + unchanged. + + This sync mode is supported for aspects. + NONE (3): + If entry sync mode is ``NONE``, then aspects are modified + according to the aspect sync mode. Other metadata that + belongs to entries in the job's scope isn't modified. + + This sync mode is supported for entries. + """ + SYNC_MODE_UNSPECIFIED = 0 + FULL = 1 + INCREMENTAL = 2 + NONE = 3 + + class LogLevel(proto.Enum): + r"""The level of logs to write to Cloud Logging for this job. + + Values: + LOG_LEVEL_UNSPECIFIED (0): + Log level unspecified. + DEBUG (1): + Debug-level logging. Captures detailed logs for each import + item. Use debug-level logging to troubleshoot issues with + specific import items. For example, use debug-level logging + to identify resources that are missing from the job scope, + entries or aspects that don't conform to the associated + entry type or aspect type, or other misconfigurations with + the metadata import file. 
+ + Depending on the size of your metadata job and the number of + logs that are generated, debug-level logging might incur + `additional + costs `__. + INFO (2): + Info-level logging. Captures logs at the + overall job level. Includes aggregate logs about + import items, but doesn't specify which import + item has an error. + """ + LOG_LEVEL_UNSPECIFIED = 0 + DEBUG = 1 + INFO = 2 + + class ImportJobScope(proto.Message): + r"""A boundary on the scope of impact that the metadata import + job can have. + + Attributes: + entry_groups (MutableSequence[str]): + Required. The entry group that is in scope for the import + job, specified as a relative resource name in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. + Only entries and aspects that belong to the specified entry + group are affected by the job. + + Must contain exactly one element. The entry group and the + job must be in the same location. + entry_types (MutableSequence[str]): + Required. The entry types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. + The job modifies only the entries and aspects that belong to + these entry types. + + If the metadata import file attempts to modify an entry + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an entry type must either match the location + of the job, or the entry type must be global. + aspect_types (MutableSequence[str]): + Optional. The aspect types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + The job modifies only the aspects that belong to these + aspect types. + + This field is required when creating an aspect-only import + job. + + If the metadata import file attempts to modify an aspect + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an aspect type must either match the + location of the job, or the aspect type must be global. + glossaries (MutableSequence[str]): + Optional. The glossaries that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/glossaries/{glossary_id}``. + + While importing Business Glossary entries, the user must + provide glossaries. While importing entries, the user does + not have to provide glossaries. If the metadata import file + attempts to modify Business Glossary entries whose glossary + isn't included in this list, the import job will skip those + entries. + + The location of a glossary must either match the location of + the job, or the glossary must be global. + entry_link_types (MutableSequence[str]): + Optional. The entry link types that are in scope for the + import job, specified as relative resource names in the + format + ``projects/{project_number_or_id}/locations/{location_id}/entryLinkTypes/{entry_link_type_id}``. + The job modifies only the entryLinks that belong to these + entry link types. + + If the metadata import file attempts to create or delete an + entry link whose entry link type isn't included in this + list, the import job will skip those entry links. + referenced_entry_scopes (MutableSequence[str]): + Optional. 
Defines the scope of entries that can be + referenced in the entry links. + + Currently, projects are supported as valid scopes. Format: + ``projects/{project_number_or_id}`` + + If the metadata import file attempts to create an entry link + which references an entry that is not in the scope, the + import job will skip that entry link. + """ + + entry_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entry_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + glossaries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + entry_link_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + referenced_entry_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + source_storage_uri: str = proto.Field( + proto.STRING, + number=1, + ) + source_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + scope: 'MetadataJob.ImportJobSpec.ImportJobScope' = proto.Field( + proto.MESSAGE, + number=2, + message='MetadataJob.ImportJobSpec.ImportJobScope', + ) + entry_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( + proto.ENUM, + number=3, + enum='MetadataJob.ImportJobSpec.SyncMode', + ) + aspect_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( + proto.ENUM, + number=4, + enum='MetadataJob.ImportJobSpec.SyncMode', + ) + log_level: 'MetadataJob.ImportJobSpec.LogLevel' = proto.Field( + proto.ENUM, + number=6, + enum='MetadataJob.ImportJobSpec.LogLevel', + ) + + class ExportJobSpec(proto.Message): + r"""Job specification for a metadata export job. + + Attributes: + scope (google.cloud.dataplex_v1.types.MetadataJob.ExportJobSpec.ExportJobScope): + Required. The scope of the export job. + output_path (str): + Required. The root path of the Cloud Storage bucket to + export the metadata to, in the format ``gs://{bucket}/``. + You can optionally specify a custom prefix after the bucket + name, in the format ``gs://{bucket}/{prefix}/``. The maximum + length of the custom prefix is 128 characters. Dataplex + Universal Catalog constructs the object path for the + exported files by using the bucket name and prefix that you + provide, followed by a system-generated path. + + The bucket must be in the same VPC Service Controls + perimeter as the job. + """ + + class ExportJobScope(proto.Message): + r"""The scope of the export job. + + Attributes: + organization_level (bool): + Whether the metadata export job is an organization-level + export job. + + - If ``true``, the job exports the entries from the same + organization and VPC Service Controls perimeter as the + job. The project that the job belongs to determines the + VPC Service Controls perimeter. If you set the job scope + to be at the organization level, then don't provide a list + of projects or entry groups. + - If ``false``, you must specify a list of projects or a + list of entry groups whose entries you want to export. + + The default is ``false``. + projects (MutableSequence[str]): + The projects whose metadata you want to export, in the + format ``projects/{project_id_or_number}``. Only the entries + from the specified projects are exported. + + The projects must be in the same organization and VPC + Service Controls perimeter as the job. 
+ + If you set the job scope to be a list of projects, then set + the organization-level export flag to false and don't + provide a list of entry groups. + entry_groups (MutableSequence[str]): + The entry groups whose metadata you want to export, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + Only the entries in the specified entry groups are exported. + + The entry groups must be in the same location and the same + VPC Service Controls perimeter as the job. + + If you set the job scope to be a list of entry groups, then + set the organization-level export flag to false and don't + provide a list of projects. + entry_types (MutableSequence[str]): + The entry types that are in scope for the export job, + specified as relative resource names in the format + ``projects/{project_id_or_number}/locations/{location}/entryTypes/{entry_type_id}``. + Only entries that belong to the specified entry types are + affected by the job. + aspect_types (MutableSequence[str]): + The aspect types that are in scope for the export job, + specified as relative resource names in the format + ``projects/{project_id_or_number}/locations/{location}/aspectTypes/{aspect_type_id}``. + Only aspects that belong to the specified aspect types are + affected by the job. + """ + + organization_level: bool = proto.Field( + proto.BOOL, + number=1, + ) + projects: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + entry_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + entry_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + scope: 'MetadataJob.ExportJobSpec.ExportJobScope' = proto.Field( + proto.MESSAGE, + number=2, + message='MetadataJob.ExportJobSpec.ExportJobScope', + ) + output_path: str = proto.Field( + proto.STRING, + number=3, + ) + + class Status(proto.Message): + r"""Metadata job status. + + Attributes: + state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): + Output only. State of the metadata job. + message (str): + Output only. Message relating to the + progression of a metadata job. + completion_percent (int): + Output only. Progress tracking. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + class State(proto.Enum): + r"""State of a metadata job. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + QUEUED (1): + The job is queued. + RUNNING (2): + The job is running. + CANCELING (3): + The job is being canceled. + CANCELED (4): + The job is canceled. + SUCCEEDED (5): + The job succeeded. + FAILED (6): + The job failed. + SUCCEEDED_WITH_ERRORS (7): + The job completed with some errors. 
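+ + Example (an illustrative sketch, not part of the generated API surface; it assumes the generated ``CatalogServiceClient`` and its ``get_metadata_job`` method, and the resource name is a placeholder):: + + from google.cloud import dataplex_v1 + + client = dataplex_v1.CatalogServiceClient() + job = client.get_metadata_job( + name="projects/my-project/locations/us-central1/metadataJobs/my-job", + ) + # SUCCEEDED, FAILED, CANCELED, and SUCCEEDED_WITH_ERRORS are + # terminal states; QUEUED, RUNNING, and CANCELING are not. + State = dataplex_v1.MetadataJob.Status.State + if job.status.state == State.SUCCEEDED_WITH_ERRORS: + print(job.status.message)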
+ """ + STATE_UNSPECIFIED = 0 + QUEUED = 1 + RUNNING = 2 + CANCELING = 3 + CANCELED = 4 + SUCCEEDED = 5 + FAILED = 6 + SUCCEEDED_WITH_ERRORS = 7 + + state: 'MetadataJob.Status.State' = proto.Field( + proto.ENUM, + number=1, + enum='MetadataJob.Status.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + completion_percent: int = proto.Field( + proto.INT32, + number=3, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + import_spec: ImportJobSpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=ImportJobSpec, + ) + export_spec: ExportJobSpec = proto.Field( + proto.MESSAGE, + number=101, + oneof='spec', + message=ExportJobSpec, + ) + import_result: ImportJobResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=ImportJobResult, + ) + export_result: ExportJobResult = proto.Field( + proto.MESSAGE, + number=201, + oneof='result', + message=ExportJobResult, + ) + status: Status = proto.Field( + proto.MESSAGE, + number=7, + message=Status, + ) + + +class EntryLink(proto.Message): + r"""EntryLink represents a link between two Entries. + + Attributes: + name (str): + Output only. Immutable. Identifier. The relative resource + name of the Entry Link, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}`` + entry_link_type (str): + Required. Immutable. Relative resource name of the Entry + Link Type used to create this Entry Link. For example: + + - Entry link between synonym terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/synonym`` + - Entry link between related terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/related`` + - Entry link between glossary terms and data assets: + ``projects/dataplex-types/locations/global/entryLinkTypes/definition`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + last updated. + entry_references (MutableSequence[google.cloud.dataplex_v1.types.EntryLink.EntryReference]): + Required. Specifies the Entries referenced in + the Entry Link. There should be exactly two + entry references. + """ + + class EntryReference(proto.Message): + r"""Reference to the Entry that is linked through the Entry Link. + + Attributes: + name (str): + Required. Immutable. The relative resource name of the + referenced Entry, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` + path (str): + Immutable. The path in the Entry that is + referenced in the Entry Link. Empty path denotes + that the Entry itself is referenced in the Entry + Link. + type_ (google.cloud.dataplex_v1.types.EntryLink.EntryReference.Type): + Required. Immutable. 
The reference type of + the Entry. + """ + class Type(proto.Enum): + r"""Reference type of the Entry. + + Values: + UNSPECIFIED (0): + Unspecified reference type. Implies that the + Entry is referenced in a non-directional Entry + Link. + SOURCE (2): + The Entry is referenced as the source of the + directional Entry Link. + TARGET (3): + The Entry is referenced as the target of the + directional Entry Link. + """ + UNSPECIFIED = 0 + SOURCE = 2 + TARGET = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + type_: 'EntryLink.EntryReference.Type' = proto.Field( + proto.ENUM, + number=3, + enum='EntryLink.EntryReference.Type', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_type: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + entry_references: MutableSequence[EntryReference] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=EntryReference, + ) + + +class CreateEntryLinkRequest(proto.Message): + r"""Request message for CreateEntryLink. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_link: 'EntryLink' = proto.Field( + proto.MESSAGE, + number=3, + message='EntryLink', + ) + + +class DeleteEntryLinkRequest(proto.Message): + r"""Request message for DeleteEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEntryLinkRequest(proto.Message): + r"""Request message for GetEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py new file mode 100644 index 000000000000..ceab2bf749ad --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'EncryptionConfig', + 'CreateEncryptionConfigRequest', + 'GetEncryptionConfigRequest', + 'UpdateEncryptionConfigRequest', + 'DeleteEncryptionConfigRequest', + 'ListEncryptionConfigsRequest', + 'ListEncryptionConfigsResponse', + }, +) + + +class EncryptionConfig(proto.Message): + r"""A Resource designed to manage encryption configurations for + customers to support Customer Managed Encryption Keys (CMEK). + + Attributes: + name (str): + Identifier. The resource name of the EncryptionConfig. + Format: + organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config} + Global location is not supported. + key (str): + Optional. If a key is chosen, it means that + the customer is using CMEK. If a key is not + chosen, it means that the customer is using + Google managed encryption. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Encryption + configuration was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Encryption + configuration was last updated. + encryption_state (google.cloud.dataplex_v1.types.EncryptionConfig.EncryptionState): + Output only. The state of encryption of the + databases. + etag (str): + Etag of the EncryptionConfig. This is a + strong etag. + failure_details (google.cloud.dataplex_v1.types.EncryptionConfig.FailureDetails): + Output only. Details of the failure if + anything related to Cmek db fails. + """ + class EncryptionState(proto.Enum): + r"""State of encryption of the databases when EncryptionConfig is + created or updated. + + Values: + ENCRYPTION_STATE_UNSPECIFIED (0): + State is not specified. + ENCRYPTING (1): + The encryption state of the database when the + EncryptionConfig is created or updated. If the + encryption fails, it is retried indefinitely and + the state is shown as ENCRYPTING. + COMPLETED (2): + The encryption of data has completed + successfully. + FAILED (3): + The encryption of data has failed. + The state is set to FAILED when the encryption + fails due to reasons like permission issues, + invalid key etc. + """ + ENCRYPTION_STATE_UNSPECIFIED = 0 + ENCRYPTING = 1 + COMPLETED = 2 + FAILED = 3 + + class FailureDetails(proto.Message): + r"""Details of the failure if anything related to Cmek db fails. + + Attributes: + error_code (google.cloud.dataplex_v1.types.EncryptionConfig.FailureDetails.ErrorCode): + Output only. The error code for the failure. + error_message (str): + Output only. The error message will be shown to the user. + Set only if the error code is REQUIRE_USER_ACTION. + """ + class ErrorCode(proto.Enum): + r"""Error code for the failure if anything related to Cmek db + fails. 
+ + Values: + UNKNOWN (0): + The error code is not specified + INTERNAL_ERROR (1): + Error because of internal server error, will + be retried automatically. + REQUIRE_USER_ACTION (2): + User action is required to resolve the error. + """ + UNKNOWN = 0 + INTERNAL_ERROR = 1 + REQUIRE_USER_ACTION = 2 + + error_code: 'EncryptionConfig.FailureDetails.ErrorCode' = proto.Field( + proto.ENUM, + number=1, + enum='EncryptionConfig.FailureDetails.ErrorCode', + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + key: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + encryption_state: EncryptionState = proto.Field( + proto.ENUM, + number=5, + enum=EncryptionState, + ) + etag: str = proto.Field( + proto.STRING, + number=6, + ) + failure_details: FailureDetails = proto.Field( + proto.MESSAGE, + number=7, + message=FailureDetails, + ) + + +class CreateEncryptionConfigRequest(proto.Message): + r"""Create EncryptionConfig Request + + Attributes: + parent (str): + Required. The location at which the + EncryptionConfig is to be created. + encryption_config_id (str): + Required. The ID of the + [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] + to create. Currently, only a value of "default" is + supported. + encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): + Required. The EncryptionConfig to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + encryption_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + encryption_config: 'EncryptionConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='EncryptionConfig', + ) + + +class GetEncryptionConfigRequest(proto.Message): + r"""Get EncryptionConfig Request + + Attributes: + name (str): + Required. The name of the EncryptionConfig to + fetch. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateEncryptionConfigRequest(proto.Message): + r"""Update EncryptionConfig Request + + Attributes: + encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): + Required. The EncryptionConfig to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. + The service treats an omitted field mask as an + implied field mask equivalent to all fields that + are populated (have a non-empty value). + """ + + encryption_config: 'EncryptionConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='EncryptionConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEncryptionConfigRequest(proto.Message): + r"""Delete EncryptionConfig Request + + Attributes: + name (str): + Required. The name of the EncryptionConfig to + delete. + etag (str): + Optional. Etag of the EncryptionConfig. This + is a strong etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEncryptionConfigsRequest(proto.Message): + r"""List EncryptionConfigs Request + + Attributes: + parent (str): + Required. The location for which the + EncryptionConfig is to be listed. + page_size (int): + Optional. Maximum number of EncryptionConfigs + to return. 
The service may return fewer than + this value. If unspecified, at most 10 + EncryptionConfigs will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEncryptionConfigs`` call. Provide this to retrieve the + subsequent page. When paginating, the ``filter`` and + ``order_by`` parameters provided to ``ListEncryptionConfigs`` + must match the call that provided the page token. + filter (str): + Optional. Filter the EncryptionConfigs to be returned. + + Using bare literals (these values are matched anywhere they + may appear in the object's field values): + + - filter=some_value + + Using fields (these values are matched only in the specified + field): + + - filter=some_field=some_value + + Supported fields: name, key, create_time, update_time, + encryption_state. + + Example: + + - filter=name=organizations/123/locations/us-central1/encryptionConfigs/test-config + + Conjunctions (AND, OR, NOT): + + - filter=name=organizations/123/locations/us-central1/encryptionConfigs/test-config AND mode=CMEK + + Logical operators (>, <, >=, <=, !=, =, :): + + - filter=create_time>2024-05-01T00:00:00.000Z + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEncryptionConfigsResponse(proto.Message): + r"""List EncryptionConfigs Response + + Attributes: + encryption_configs (MutableSequence[google.cloud.dataplex_v1.types.EncryptionConfig]): + The list of EncryptionConfigs under the given + parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + encryption_configs: MutableSequence['EncryptionConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EncryptionConfig', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py new file mode 100644 index 000000000000..4519a7b7e7ac --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'CreateContentRequest', + 'UpdateContentRequest', + 'DeleteContentRequest', + 'ListContentRequest', + 'ListContentResponse', + 'GetContentRequest', + }, +) + + +class CreateContentRequest(proto.Message): + r"""Create content request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + content: analyze.Content = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Content, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateContentRequest(proto.Message): + r"""Update content request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + content: analyze.Content = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Content, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteContentRequest(proto.Message): + r"""Delete content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListContentRequest(proto.Message): + r"""List content request. Returns the BASIC Content view. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + page_size (int): + Optional. Maximum number of content to + return. The service may return fewer than this + value. If unspecified, at most 10 content will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListContent`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListContent`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + following formats are supported: + + - labels.key1 = "value1" + - labels:key1 + - type = "NOTEBOOK" + - type = "SQL_SCRIPT" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListContentResponse(proto.Message): + r"""List content response.
+ + Attributes: + content (MutableSequence[google.cloud.dataplex_v1.types.Content]): + Content under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + content: MutableSequence[analyze.Content] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=analyze.Content, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetContentRequest(proto.Message): + r"""Get content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + view (google.cloud.dataplex_v1.types.GetContentRequest.ContentView): + Optional. Specify content view to make a + partial request. + """ + class ContentView(proto.Enum): + r"""Specifies whether the request should return the full or the + partial representation. + + Values: + CONTENT_VIEW_UNSPECIFIED (0): + Content view not specified. Defaults to + BASIC. The API will default to the BASIC view. + BASIC (1): + Will not return the ``data_text`` field. + FULL (2): + Returns the complete proto. + """ + CONTENT_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: ContentView = proto.Field( + proto.ENUM, + number=2, + enum=ContentView, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py new file mode 100644 index 000000000000..a6d48d618a32 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py @@ -0,0 +1,364 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataDiscoverySpec', + 'DataDiscoveryResult', + }, +) + + +class DataDiscoverySpec(proto.Message): + r"""Spec for a data discovery scan. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bigquery_publishing_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig): + Optional. Configuration for metadata + publishing. + storage_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig): + Cloud Storage related configurations. + + This field is a member of `oneof`_ ``resource_config``. + """ + + class BigQueryPublishingConfig(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + table_type (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig.TableType): + Optional. 
Determines whether to publish + discovered tables as BigLake external tables or + non-BigLake external tables. + connection (str): + Optional. The BigQuery connection used to create BigLake + tables. Must be in the form + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + location (str): + Optional. The location of the BigQuery dataset to publish + BigLake external or non-BigLake external tables to. + + 1. If the Cloud Storage bucket is located in a multi-region + bucket, then BigQuery dataset can be in the same + multi-region bucket or any single region that is included + in the same multi-region bucket. The datascan can be + created in any single region that is included in the same + multi-region bucket + 2. If the Cloud Storage bucket is located in a dual-region + bucket, then BigQuery dataset can be located in regions + that are included in the dual-region bucket, or in a + multi-region that includes the dual-region. The datascan + can be created in any single region that is included in + the same dual-region bucket. + 3. If the Cloud Storage bucket is located in a single + region, then BigQuery dataset can be in the same single + region or any multi-region bucket that includes the same + single region. The datascan will be created in the same + single region as the bucket. + 4. If the BigQuery dataset is in single region, it must be + in the same single region as the datascan. + + For supported values, refer to + https://cloud.google.com/bigquery/docs/locations#supported_locations. + project (str): + Optional. The project of the BigQuery dataset to publish + BigLake external or non-BigLake external tables to. If not + specified, the project of the Cloud Storage bucket will be + used. The format is "projects/{project_id_or_number}". + """ + class TableType(proto.Enum): + r"""Determines how discovered tables are published. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + Table type unspecified. + EXTERNAL (1): + Default. Discovered tables are published as + BigQuery external tables whose data is accessed + using the credentials of the user querying the + table. + BIGLAKE (2): + Discovered tables are published as BigLake + external tables whose data is accessed using the + credentials of the associated BigQuery + connection. + """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL = 1 + BIGLAKE = 2 + + table_type: 'DataDiscoverySpec.BigQueryPublishingConfig.TableType' = proto.Field( + proto.ENUM, + number=2, + enum='DataDiscoverySpec.BigQueryPublishingConfig.TableType', + ) + connection: str = proto.Field( + proto.STRING, + number=3, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + + class StorageConfig(proto.Message): + r"""Configurations related to Cloud Storage as the data source. + + Attributes: + include_patterns (MutableSequence[str]): + Optional. Defines the data to include during + discovery when only a subset of the data should + be considered. Provide a list of patterns that + identify the data to include. For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + exclude_patterns (MutableSequence[str]): + Optional. Defines the data to exclude during + discovery. Provide a list of patterns that + identify the data to exclude. 
For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + csv_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.CsvOptions): + Optional. Configuration for CSV data. + json_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.JsonOptions): + Optional. Configuration for JSON data. + """ + + class CsvOptions(proto.Message): + r"""Describes CSV and similar semi-structured data formats. + + Attributes: + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. + delimiter (str): + Optional. The delimiter that is used to separate values. The + default is ``,`` (comma). + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for CSV data. If true, all columns + are registered as strings. + quote (str): + Optional. The character used to quote column values. Accepts + ``"`` (double quotation mark) or ``'`` (single quotation + mark). If unspecified, defaults to ``"`` (double quotation + mark). + """ + + header_rows: int = proto.Field( + proto.INT32, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + encoding: str = proto.Field( + proto.STRING, + number=3, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + quote: str = proto.Field( + proto.STRING, + number=5, + ) + + class JsonOptions(proto.Message): + r"""Describes JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for JSON data. If true, all columns + are registered as their primitive types + (strings, number, or boolean). + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + csv_options: 'DataDiscoverySpec.StorageConfig.CsvOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='DataDiscoverySpec.StorageConfig.CsvOptions', + ) + json_options: 'DataDiscoverySpec.StorageConfig.JsonOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='DataDiscoverySpec.StorageConfig.JsonOptions', + ) + + bigquery_publishing_config: BigQueryPublishingConfig = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishingConfig, + ) + storage_config: StorageConfig = proto.Field( + proto.MESSAGE, + number=100, + oneof='resource_config', + message=StorageConfig, + ) + + +class DataDiscoveryResult(proto.Message): + r"""The output of a data discovery scan. + + Attributes: + bigquery_publishing (google.cloud.dataplex_v1.types.DataDiscoveryResult.BigQueryPublishing): + Output only. Configuration for metadata + publishing. + scan_statistics (google.cloud.dataplex_v1.types.DataDiscoveryResult.ScanStatistics): + Output only. Describes result statistics of a + data scan discovery job. + """ + + class BigQueryPublishing(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + dataset (str): + Output only. 
The BigQuery dataset the + discovered tables are published to. + location (str): + Output only. The location of the BigQuery + publishing dataset. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + class ScanStatistics(proto.Message): + r"""Describes result statistics of a data scan discovery job. + + Attributes: + scanned_file_count (int): + The number of files scanned. + data_processed_bytes (int): + The data processed in bytes. + files_excluded (int): + The number of files excluded. + tables_created (int): + The number of tables created. + tables_deleted (int): + The number of tables deleted. + tables_updated (int): + The number of tables updated. + filesets_created (int): + The number of filesets created. + filesets_deleted (int): + The number of filesets deleted. + filesets_updated (int): + The number of filesets updated. + """ + + scanned_file_count: int = proto.Field( + proto.INT32, + number=1, + ) + data_processed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + files_excluded: int = proto.Field( + proto.INT32, + number=3, + ) + tables_created: int = proto.Field( + proto.INT32, + number=4, + ) + tables_deleted: int = proto.Field( + proto.INT32, + number=5, + ) + tables_updated: int = proto.Field( + proto.INT32, + number=6, + ) + filesets_created: int = proto.Field( + proto.INT32, + number=7, + ) + filesets_deleted: int = proto.Field( + proto.INT32, + number=8, + ) + filesets_updated: int = proto.Field( + proto.INT32, + number=9, + ) + + bigquery_publishing: BigQueryPublishing = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishing, + ) + scan_statistics: ScanStatistics = proto.Field( + proto.MESSAGE, + number=2, + message=ScanStatistics, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py new file mode 100644 index 000000000000..cd7c5ce8369f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py @@ -0,0 +1,546 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import processing + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataProfileSpec', + 'DataProfileResult', + }, +) + + +class DataProfileSpec(proto.Message): + r"""DataProfileScan related setting. + + Attributes: + sampling_percent (float): + Optional. The percentage of the records to be selected from + the dataset for DataScan. + + - Value can range between 0.0 and 100.0 with up to 3 + significant decimal digits. + - Sampling is not applied if ``sampling_percent`` is not + specified, 0 or + + 100. 
+ row_filter (str): + Optional. A filter applied to all rows in a + single DataScan job. The filter needs to be a + valid SQL expression for a WHERE clause in + BigQuery standard SQL syntax. + Example: col1 >= 0 AND col2 < 10 + post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): + Optional. Actions to take upon job + completion. + include_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): + Optional. The fields to include in data profile. + + If not specified, all fields at the time of profile scan job + execution are included, except for ones listed in + ``exclude_fields``. + exclude_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): + Optional. The fields to exclude from data profile. + + If specified, the fields will be excluded from data profile, + regardless of ``include_fields`` value. + """ + + class PostScanActions(proto.Message): + r"""The configuration of post scan actions of DataProfileScan + job. + + Attributes: + bigquery_export (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions.BigQueryExport): + Optional. If set, results will be exported to + the provided BigQuery table. + """ + + class BigQueryExport(proto.Message): + r"""The configuration of BigQuery export post scan action. + + Attributes: + results_table (str): + Optional. The BigQuery table to export DataProfileScan + results to. Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + """ + + results_table: str = proto.Field( + proto.STRING, + number=1, + ) + + bigquery_export: 'DataProfileSpec.PostScanActions.BigQueryExport' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileSpec.PostScanActions.BigQueryExport', + ) + + class SelectedFields(proto.Message): + r"""The specification for fields to include or exclude in data + profile scan. + + Attributes: + field_names (MutableSequence[str]): + Optional. Expected input is a list of fully + qualified names of fields as in the schema. + + Only top-level field names for nested fields are + supported. For instance, if 'x' is of nested + field type, listing 'x' is supported but 'x.y.z' + is not supported. Here 'y' and 'y.z' are nested + fields of 'x'. + """ + + field_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + sampling_percent: float = proto.Field( + proto.FLOAT, + number=2, + ) + row_filter: str = proto.Field( + proto.STRING, + number=3, + ) + post_scan_actions: PostScanActions = proto.Field( + proto.MESSAGE, + number=4, + message=PostScanActions, + ) + include_fields: SelectedFields = proto.Field( + proto.MESSAGE, + number=5, + message=SelectedFields, + ) + exclude_fields: SelectedFields = proto.Field( + proto.MESSAGE, + number=6, + message=SelectedFields, + ) + + +class DataProfileResult(proto.Message): + r"""DataProfileResult defines the output of DataProfileScan. Each + field of the table will have a field-type-specific profile + result. + + Attributes: + row_count (int): + Output only. The count of rows scanned. + profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): + Output only. The profile information per + field. + scanned_data (google.cloud.dataplex_v1.types.ScannedData): + Output only. The data scanned for this + result. + post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): + Output only. The result of post scan actions.
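+ + Example (an illustrative sketch; it assumes ``result`` is a + populated ``DataProfileResult``, for instance taken from a finished + data profile scan job):: + + for field in result.profile.fields: + info = field.profile + # Per-field profile: null ratio plus the top N values. + print(f"{field.name} ({field.type_}): null_ratio={info.null_ratio:.3f}") + for top in info.top_n_values: + print(f" {top.value!r} count={top.count} ratio={top.ratio:.1%}")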
+ """ + + class Profile(proto.Message): + r"""Contains name, type, mode and field type specific profile + information. + + Attributes: + fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): + Output only. List of fields with structural + and profile information for each field. + """ + + class Field(proto.Message): + r"""A field within a table. + + Attributes: + name (str): + Output only. The name of the field. + type_ (str): + Output only. The data type retrieved from the schema of the + data source. For instance, for a BigQuery native table, it + is the `BigQuery Table + Schema `__. + For a Dataplex Universal Catalog Entity, it is the `Entity + Schema `__. + mode (str): + Output only. The mode of the field. Possible values include: + + - REQUIRED, if it is a required field. + - NULLABLE, if it is an optional field. + - REPEATED, if it is a repeated field. + profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): + Output only. Profile information for the + corresponding field. + """ + + class ProfileInfo(proto.Message): + r"""The profile information for each field type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + null_ratio (float): + Output only. Ratio of rows with null value + against total scanned rows. + distinct_ratio (float): + Output only. Ratio of rows with distinct + values against total scanned rows. Not available + for complex non-groupable field type, including + RECORD, ARRAY, GEOGRAPHY, and JSON, as well as + fields with REPEATABLE mode. + top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): + Output only. The list of top N non-null + values, frequency and ratio with which they + occur in the scanned data. N is 10 or equal to + the number of distinct values in the field, + whichever is smaller. Not available for complex + non-groupable field type, including RECORD, + ARRAY, GEOGRAPHY, and JSON, as well as fields + with REPEATABLE mode. + string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): + String type field information. + + This field is a member of `oneof`_ ``field_info``. + integer_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo): + Integer type field information. + + This field is a member of `oneof`_ ``field_info``. + double_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo): + Double type field information. + + This field is a member of `oneof`_ ``field_info``. + """ + + class StringFieldInfo(proto.Message): + r"""The profile information for a string type field. + + Attributes: + min_length (int): + Output only. Minimum length of non-null + values in the scanned data. + max_length (int): + Output only. Maximum length of non-null + values in the scanned data. + average_length (float): + Output only. Average length of non-null + values in the scanned data. 
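+ + Example (illustrative only; assumes ``info`` is a populated + ``StringFieldInfo`` taken from a field's profile):: + + # Lengths are computed over the non-null scanned values. + spread = info.max_length - info.min_length + print(info.min_length, info.average_length, info.max_length, spread)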
+ """ + + min_length: int = proto.Field( + proto.INT64, + number=1, + ) + max_length: int = proto.Field( + proto.INT64, + number=2, + ) + average_length: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class IntegerFieldInfo(proto.Message): + r"""The profile information for an integer type field. + + Attributes: + average (float): + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. + standard_deviation (float): + Output only. Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. + min_ (int): + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. + quartiles (MutableSequence[int]): + Output only. A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. The three main + quartiles used are: The first quartile (Q1) + splits off the lowest 25% of data from the + highest 75%. It is also known as the lower or + 25th empirical quartile, as 25% of the data is + below this point. The second quartile (Q2) is + the median of a data set. So, 50% of the data + lies below this point. The third quartile (Q3) + splits off the highest 25% of data from the + lowest 75%. It is known as the upper or 75th + empirical quartile, as 75% of the data lies + below this point. Here, the quartiles are + provided as an ordered list of approximate + quartile values for the scanned data, occurring + in order Q1, median, Q3. + max_ (int): + Output only. Maximum of non-null values in + the scanned data. NaN, if the field has a NaN. + """ + + average: float = proto.Field( + proto.DOUBLE, + number=1, + ) + standard_deviation: float = proto.Field( + proto.DOUBLE, + number=3, + ) + min_: int = proto.Field( + proto.INT64, + number=4, + ) + quartiles: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=6, + ) + max_: int = proto.Field( + proto.INT64, + number=5, + ) + + class DoubleFieldInfo(proto.Message): + r"""The profile information for a double type field. + + Attributes: + average (float): + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. + standard_deviation (float): + Output only. Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. + min_ (float): + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. + quartiles (MutableSequence[float]): + Output only. A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. The three main + quartiles used are: The first quartile (Q1) + splits off the lowest 25% of data from the + highest 75%. It is also known as the lower or + 25th empirical quartile, as 25% of the data is + below this point. The second quartile (Q2) is + the median of a data set. So, 50% of the data + lies below this point. The third quartile (Q3) + splits off the highest 25% of data from the + lowest 75%. It is known as the upper or 75th + empirical quartile, as 75% of the data lies + below this point. Here, the quartiles are + provided as an ordered list of quartile values + for the scanned data, occurring in order Q1, + median, Q3. + max_ (float): + Output only. Maximum of non-null values in + the scanned data. NaN, if the field has a NaN.
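+ + Example (illustrative only; assumes ``info`` is a populated + ``DoubleFieldInfo`` taken from a field's profile):: + + # ``quartiles`` is ordered Q1, median, Q3, so the + # interquartile range is Q3 - Q1. + q1, median, q3 = info.quartiles + iqr = q3 - q1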
+ """ + + average: float = proto.Field( + proto.DOUBLE, + number=1, + ) + standard_deviation: float = proto.Field( + proto.DOUBLE, + number=3, + ) + min_: float = proto.Field( + proto.DOUBLE, + number=4, + ) + quartiles: MutableSequence[float] = proto.RepeatedField( + proto.DOUBLE, + number=6, + ) + max_: float = proto.Field( + proto.DOUBLE, + number=5, + ) + + class TopNValue(proto.Message): + r"""Top N non-null values in the scanned data. + + Attributes: + value (str): + Output only. String value of a top N non-null + value. + count (int): + Output only. Count of the corresponding value + in the scanned data. + ratio (float): + Output only. Ratio of the corresponding value + in the field against the total number of rows in + the scanned data. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + ratio: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + null_ratio: float = proto.Field( + proto.DOUBLE, + number=2, + ) + distinct_ratio: float = proto.Field( + proto.DOUBLE, + number=3, + ) + top_n_values: MutableSequence['DataProfileResult.Profile.Field.ProfileInfo.TopNValue'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='DataProfileResult.Profile.Field.ProfileInfo.TopNValue', + ) + string_profile: 'DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo' = proto.Field( + proto.MESSAGE, + number=101, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo', + ) + integer_profile: 'DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo' = proto.Field( + proto.MESSAGE, + number=102, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo', + ) + double_profile: 'DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo' = proto.Field( + proto.MESSAGE, + number=103, + oneof='field_info', + message='DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + profile: 'DataProfileResult.Profile.Field.ProfileInfo' = proto.Field( + proto.MESSAGE, + number=4, + message='DataProfileResult.Profile.Field.ProfileInfo', + ) + + fields: MutableSequence['DataProfileResult.Profile.Field'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataProfileResult.Profile.Field', + ) + + class PostScanActionsResult(proto.Message): + r"""The result of post scan actions of DataProfileScan job. + + Attributes: + bigquery_export_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult): + Output only. The result of BigQuery export + post scan action. + """ + + class BigQueryExportResult(proto.Message): + r"""The result of BigQuery export post scan action. + + Attributes: + state (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult.State): + Output only. Execution state for the BigQuery + exporting. + message (str): + Output only. Additional information about the + BigQuery exporting. + """ + class State(proto.Enum): + r"""Execution state for the exporting. + + Values: + STATE_UNSPECIFIED (0): + The exporting state is unspecified. + SUCCEEDED (1): + The exporting completed successfully. + FAILED (2): + The exporting is no longer running due to an + error. 
+ SKIPPED (3): + The exporting is skipped due to no valid scan + result to export (usually caused by scan + failed). + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataProfileResult.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataProfileResult.PostScanActionsResult.BigQueryExportResult', + ) + + row_count: int = proto.Field( + proto.INT64, + number=3, + ) + profile: Profile = proto.Field( + proto.MESSAGE, + number=4, + message=Profile, + ) + scanned_data: processing.ScannedData = proto.Field( + proto.MESSAGE, + number=5, + message=processing.ScannedData, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=6, + message=PostScanActionsResult, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py new file mode 100644 index 000000000000..20217defc1a9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py @@ -0,0 +1,962 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import datascans_common +from google.cloud.dataplex_v1.types import processing + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataQualitySpec', + 'DataQualityResult', + 'DataQualityRuleResult', + 'DataQualityDimensionResult', + 'DataQualityDimension', + 'DataQualityRule', + 'DataQualityColumnResult', + }, +) + + +class DataQualitySpec(proto.Message): + r"""DataQualityScan related setting. + + Attributes: + rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): + Required. The list of rules to evaluate + against a data source. At least one rule is + required. + sampling_percent (float): + Optional. The percentage of the records to be selected from + the dataset for DataScan. + + - Value can range between 0.0 and 100.0 with up to 3 + significant decimal digits. + - Sampling is not applied if ``sampling_percent`` is not + specified, 0, or 100. + row_filter (str): + Optional. A filter applied to all rows in a single DataScan + job. The filter needs to be a valid SQL expression for a + `WHERE clause in GoogleSQL + syntax `__. + + Example: col1 >= 0 AND col2 < 10 + post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): + Optional. Actions to take upon job + completion.
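A minimal sketch of assembling the spec described above, assuming the generated google-cloud-dataplex package is importable; the column name is a placeholder and the row filter is the documented example.

from google.cloud import dataplex_v1

spec = dataplex_v1.DataQualitySpec(
    rules=[
        dataplex_v1.DataQualityRule(
            column="order_id",  # hypothetical column
            dimension="COMPLETENESS",
            non_null_expectation=dataplex_v1.DataQualityRule.NonNullExpectation(),
        ),
    ],
    sampling_percent=10.0,                  # scan 10% of rows
    row_filter="col1 >= 0 AND col2 < 10",   # the documented example filter
)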
+ catalog_publishing_enabled (bool): + Optional. If set, the latest DataScan job + result will be published as Dataplex Universal + Catalog metadata. + """ + + class PostScanActions(proto.Message): + r"""The configuration of post scan actions of DataQualityScan. + + Attributes: + bigquery_export (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.BigQueryExport): + Optional. If set, results will be exported to + the provided BigQuery table. + notification_report (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.NotificationReport): + Optional. If set, results will be sent to the + provided notification recipients upon triggers. + """ + + class BigQueryExport(proto.Message): + r"""The configuration of BigQuery export post scan action. + + Attributes: + results_table (str): + Optional. The BigQuery table to export DataQualityScan + results to. Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + """ + + results_table: str = proto.Field( + proto.STRING, + number=1, + ) + + class Recipients(proto.Message): + r"""The individuals or groups who are designated to receive + notifications upon triggers. + + Attributes: + emails (MutableSequence[str]): + Optional. The email recipients who will + receive the DataQualityScan results report. + """ + + emails: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class ScoreThresholdTrigger(proto.Message): + r"""This trigger is triggered when the DQ score in the job result + is less than a specified input score. + + Attributes: + score_threshold (float): + Optional. The score range is in [0,100]. + """ + + score_threshold: float = proto.Field( + proto.FLOAT, + number=2, + ) + + class JobFailureTrigger(proto.Message): + r"""This trigger is triggered when the scan job itself fails, + regardless of the result. + + """ + + class JobEndTrigger(proto.Message): + r"""This trigger is triggered whenever a scan job run ends, + regardless of the result. + + """ + + class NotificationReport(proto.Message): + r"""The configuration of notification report post scan action. + + Attributes: + recipients (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.Recipients): + Required. The recipients who will receive the + notification report. + score_threshold_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.ScoreThresholdTrigger): + Optional. If set, report will be sent when + score threshold is met. + job_failure_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobFailureTrigger): + Optional. If set, report will be sent when a + scan job fails. + job_end_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobEndTrigger): + Optional. If set, report will be sent when a + scan job ends.
+ """ + + recipients: 'DataQualitySpec.PostScanActions.Recipients' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualitySpec.PostScanActions.Recipients', + ) + score_threshold_trigger: 'DataQualitySpec.PostScanActions.ScoreThresholdTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='DataQualitySpec.PostScanActions.ScoreThresholdTrigger', + ) + job_failure_trigger: 'DataQualitySpec.PostScanActions.JobFailureTrigger' = proto.Field( + proto.MESSAGE, + number=4, + message='DataQualitySpec.PostScanActions.JobFailureTrigger', + ) + job_end_trigger: 'DataQualitySpec.PostScanActions.JobEndTrigger' = proto.Field( + proto.MESSAGE, + number=5, + message='DataQualitySpec.PostScanActions.JobEndTrigger', + ) + + bigquery_export: 'DataQualitySpec.PostScanActions.BigQueryExport' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualitySpec.PostScanActions.BigQueryExport', + ) + notification_report: 'DataQualitySpec.PostScanActions.NotificationReport' = proto.Field( + proto.MESSAGE, + number=2, + message='DataQualitySpec.PostScanActions.NotificationReport', + ) + + rules: MutableSequence['DataQualityRule'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataQualityRule', + ) + sampling_percent: float = proto.Field( + proto.FLOAT, + number=4, + ) + row_filter: str = proto.Field( + proto.STRING, + number=5, + ) + post_scan_actions: PostScanActions = proto.Field( + proto.MESSAGE, + number=6, + message=PostScanActions, + ) + catalog_publishing_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) + + +class DataQualityResult(proto.Message): + r"""The output of a DataQualityScan. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + passed (bool): + Output only. Overall data quality result -- ``true`` if all + rules passed. + score (float): + Output only. The overall data quality score. + + The score ranges between [0, 100] (up to two decimal + points). + + This field is a member of `oneof`_ ``_score``. + dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): + Output only. A list of results at the dimension level. + + A dimension will have a corresponding + ``DataQualityDimensionResult`` if and only if there is at + least one rule with the 'dimension' field set to it. + columns (MutableSequence[google.cloud.dataplex_v1.types.DataQualityColumnResult]): + Output only. A list of results at the column level. + + A column will have a corresponding + ``DataQualityColumnResult`` if and only if there is at least + one rule with the 'column' field set to it. + rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRuleResult]): + Output only. A list of all the rules in a + job, and their results. + row_count (int): + Output only. The count of rows processed. + scanned_data (google.cloud.dataplex_v1.types.ScannedData): + Output only. The data scanned for this + result. + post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): + Output only. The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + Output only. The status of publishing the + data scan as Dataplex Universal Catalog + metadata. + """ + + class PostScanActionsResult(proto.Message): + r"""The result of post scan actions of DataQualityScan job. 
+ + Attributes: + bigquery_export_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult): + Output only. The result of BigQuery export + post scan action. + """ + + class BigQueryExportResult(proto.Message): + r"""The result of BigQuery export post scan action. + + Attributes: + state (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult.State): + Output only. Execution state for the BigQuery + exporting. + message (str): + Output only. Additional information about the + BigQuery exporting. + """ + class State(proto.Enum): + r"""Execution state for the exporting. + + Values: + STATE_UNSPECIFIED (0): + The exporting state is unspecified. + SUCCEEDED (1): + The exporting completed successfully. + FAILED (2): + The exporting is no longer running due to an + error. + SKIPPED (3): + The exporting is skipped due to no valid scan + result to export (usually caused by scan + failed). + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataQualityResult.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityResult.PostScanActionsResult.BigQueryExportResult', + ) + + passed: bool = proto.Field( + proto.BOOL, + number=5, + ) + score: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='DataQualityDimensionResult', + ) + columns: MutableSequence['DataQualityColumnResult'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='DataQualityColumnResult', + ) + rules: MutableSequence['DataQualityRuleResult'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='DataQualityRuleResult', + ) + row_count: int = proto.Field( + proto.INT64, + number=4, + ) + scanned_data: processing.ScannedData = proto.Field( + proto.MESSAGE, + number=7, + message=processing.ScannedData, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=8, + message=PostScanActionsResult, + ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = proto.Field( + proto.MESSAGE, + number=11, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + + +class DataQualityRuleResult(proto.Message): + r"""DataQualityRuleResult provides a more detailed, per-rule view + of the results. + + Attributes: + rule (google.cloud.dataplex_v1.types.DataQualityRule): + Output only. The rule specified in the + DataQualitySpec, as is. + passed (bool): + Output only. Whether the rule passed or + failed. + evaluated_count (int): + Output only. The number of rows a rule was evaluated + against. + + This field is only valid for row-level type rules. + + Evaluated count can be configured to either + + - include all rows (default) - with ``null`` rows + automatically failing rule evaluation, or + - exclude ``null`` rows from the ``evaluated_count``, by + setting ``ignore_nulls = true``. + + This field is not set for rule SqlAssertion. + passed_count (int): + Output only. The number of rows which passed + a rule evaluation. + This field is only valid for row-level type + rules. 
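A small reading sketch for the DataQualityResult fields above; `result` is assumed to come from a completed DataScanJob, and the output format is illustrative.

def summarize_quality(result):
    # Overall verdict, then per-dimension and per-rule detail.
    print(f"passed={result.passed} score={result.score}")
    for dim in result.dimensions:
        print(f"  dimension {dim.dimension.name}: passed={dim.passed} score={dim.score}")
    for rule_result in result.rules:
        print(f"  rule {rule_result.rule.name!r}: passed={rule_result.passed} "
              f"pass_ratio={rule_result.pass_ratio:.2%}")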
+ + This field is not set for rule SqlAssertion. + null_count (int): + Output only. The number of rows with null + values in the specified column. + pass_ratio (float): + Output only. The ratio of **passed_count / + evaluated_count**. + + This field is only valid for row-level type rules. + failing_rows_query (str): + Output only. The query to find rows that did + not pass this rule. + This field is only valid for row-level type + rules. + assertion_row_count (int): + Output only. The number of rows returned by + the SQL statement in a SQL assertion rule. + + This field is only valid for SQL assertion + rules. + """ + + rule: 'DataQualityRule' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityRule', + ) + passed: bool = proto.Field( + proto.BOOL, + number=7, + ) + evaluated_count: int = proto.Field( + proto.INT64, + number=9, + ) + passed_count: int = proto.Field( + proto.INT64, + number=8, + ) + null_count: int = proto.Field( + proto.INT64, + number=5, + ) + pass_ratio: float = proto.Field( + proto.DOUBLE, + number=6, + ) + failing_rows_query: str = proto.Field( + proto.STRING, + number=10, + ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=11, + ) + + +class DataQualityDimensionResult(proto.Message): + r"""DataQualityDimensionResult provides a more detailed, + per-dimension view of the results. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (google.cloud.dataplex_v1.types.DataQualityDimension): + Output only. The dimension config specified + in the DataQualitySpec, as is. + passed (bool): + Output only. Whether the dimension passed or + failed. + score (float): + Output only. The dimension-level data quality score for this + data scan job if and only if the 'dimension' field is set. + + The score ranges between [0, 100] (up to two decimal + points). + + This field is a member of `oneof`_ ``_score``. + """ + + dimension: 'DataQualityDimension' = proto.Field( + proto.MESSAGE, + number=1, + message='DataQualityDimension', + ) + passed: bool = proto.Field( + proto.BOOL, + number=3, + ) + score: float = proto.Field( + proto.FLOAT, + number=4, + optional=True, + ) + + +class DataQualityDimension(proto.Message): + r"""A dimension captures data quality intent about a defined + subset of the rules specified. + + Attributes: + name (str): + Output only. The dimension name a rule + belongs to. Custom dimension name is supported + with all uppercase letters and maximum length of + 30 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataQualityRule(proto.Message): + r"""A rule captures data quality intent about a data source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RangeExpectation): + Row-level rule which evaluates whether each + column value lies between a specified range. + + This field is a member of `oneof`_ ``rule_type``. + non_null_expectation (google.cloud.dataplex_v1.types.DataQualityRule.NonNullExpectation): + Row-level rule which evaluates whether each + column value is null. + + This field is a member of `oneof`_ ``rule_type``. 
+ set_expectation (google.cloud.dataplex_v1.types.DataQualityRule.SetExpectation): + Row-level rule which evaluates whether each + column value is contained by a specified set. + + This field is a member of `oneof`_ ``rule_type``. + regex_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RegexExpectation): + Row-level rule which evaluates whether each + column value matches a specified regex. + + This field is a member of `oneof`_ ``rule_type``. + uniqueness_expectation (google.cloud.dataplex_v1.types.DataQualityRule.UniquenessExpectation): + Row-level rule which evaluates whether each + column value is unique. + + This field is a member of `oneof`_ ``rule_type``. + statistic_range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation): + Aggregate rule which evaluates whether the + column aggregate statistic lies between a + specified range. + + This field is a member of `oneof`_ ``rule_type``. + row_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RowConditionExpectation): + Row-level rule which evaluates whether each + row in a table passes the specified condition. + + This field is a member of `oneof`_ ``rule_type``. + table_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.TableConditionExpectation): + Aggregate rule which evaluates whether the + provided expression is true for a table. + + This field is a member of `oneof`_ ``rule_type``. + sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): + Aggregate rule which evaluates the number of + rows returned for the provided statement. If any + rows are returned, this rule fails. + + This field is a member of `oneof`_ ``rule_type``. + column (str): + Optional. The unnested column which this rule + is evaluated against. + ignore_null (bool): + Optional. Rows with ``null`` values will automatically fail + a rule, unless ``ignore_null`` is ``true``. In that case, + such ``null`` rows are trivially considered passing. + + This field is only valid for the following type of rules: + + - RangeExpectation + - RegexExpectation + - SetExpectation + - UniquenessExpectation + dimension (str): + Required. The dimension a rule belongs to. + Results are also aggregated at the dimension + level. Custom dimension name is supported with + all uppercase letters and maximum length of 30 + characters. + threshold (float): + Optional. The minimum ratio of **passing_rows / total_rows** + required to pass this rule, with a range of [0.0, 1.0]. + + 0 indicates default value (i.e. 1.0). + + This field is only valid for row-level type rules. + name (str): + Optional. A mutable name for the rule. + + - The name must contain only letters (a-z, A-Z), numbers + (0-9), or hyphens (-). + - The maximum length is 63 characters. + - Must start with a letter. + - Must end with a number or a letter. + description (str): + Optional. Description of the rule. + + - The maximum length is 1,024 characters. + suspended (bool): + Optional. Whether the Rule is active or + suspended. Default is false. + """ + + class RangeExpectation(proto.Message): + r"""Evaluates whether each column value lies between a specified + range. + + Attributes: + min_value (str): + Optional. The minimum column value allowed for a row to pass + this validation. At least one of ``min_value`` and + ``max_value`` need to be provided. + max_value (str): + Optional. The maximum column value allowed for a row to pass + this validation. At least one of ``min_value`` and + ``max_value`` need to be provided. 
+ strict_min_enabled (bool): + Optional. Whether each value needs to be strictly greater + than ('>') the minimum, or if equality is allowed. + + Only relevant if a ``min_value`` has been defined. Default = + false. + strict_max_enabled (bool): + Optional. Whether each value needs to be strictly lesser + than ('<') the maximum, or if equality is allowed. + + Only relevant if a ``max_value`` has been defined. Default = + false. + """ + + min_value: str = proto.Field( + proto.STRING, + number=1, + ) + max_value: str = proto.Field( + proto.STRING, + number=2, + ) + strict_min_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + strict_max_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class NonNullExpectation(proto.Message): + r"""Evaluates whether each column value is null. + """ + + class SetExpectation(proto.Message): + r"""Evaluates whether each column value is contained by a + specified set. + + Attributes: + values (MutableSequence[str]): + Optional. Expected values for the column + value. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class RegexExpectation(proto.Message): + r"""Evaluates whether each column value matches a specified + regex. + + Attributes: + regex (str): + Optional. A regular expression the column + value is expected to match. + """ + + regex: str = proto.Field( + proto.STRING, + number=1, + ) + + class UniquenessExpectation(proto.Message): + r"""Evaluates whether the column has duplicates. + """ + + class StatisticRangeExpectation(proto.Message): + r"""Evaluates whether the column aggregate statistic lies between + a specified range. + + Attributes: + statistic (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation.ColumnStatistic): + Optional. The aggregate metric to evaluate. + min_value (str): + Optional. The minimum column statistic value allowed for a + row to pass this validation. + + At least one of ``min_value`` and ``max_value`` need to be + provided. + max_value (str): + Optional. The maximum column statistic value allowed for a + row to pass this validation. + + At least one of ``min_value`` and ``max_value`` need to be + provided. + strict_min_enabled (bool): + Optional. Whether column statistic needs to be strictly + greater than ('>') the minimum, or if equality is allowed. + + Only relevant if a ``min_value`` has been defined. Default = + false. + strict_max_enabled (bool): + Optional. Whether column statistic needs to be strictly + lesser than ('<') the maximum, or if equality is allowed. + + Only relevant if a ``max_value`` has been defined. Default = + false. + """ + class ColumnStatistic(proto.Enum): + r"""The list of aggregate metrics a rule can be evaluated + against. 
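A hedged sketch of the row-level expectations documented above; every column name and the regex pattern are illustrative assumptions.

from google.cloud import dataplex_v1

rules = [
    dataplex_v1.DataQualityRule(
        column="price",  # hypothetical columns throughout
        dimension="VALIDITY",
        range_expectation=dataplex_v1.DataQualityRule.RangeExpectation(
            min_value="0",
            strict_min_enabled=False,  # a value of exactly 0 still passes
        ),
    ),
    dataplex_v1.DataQualityRule(
        column="currency",
        dimension="VALIDITY",
        set_expectation=dataplex_v1.DataQualityRule.SetExpectation(
            values=["USD", "EUR", "JPY"],
        ),
    ),
    dataplex_v1.DataQualityRule(
        column="sku",
        dimension="VALIDITY",
        regex_expectation=dataplex_v1.DataQualityRule.RegexExpectation(
            regex=r"^[A-Z]{3}-\d{4}$",
        ),
        ignore_null=True,  # null rows pass trivially for regex rules
    ),
]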
+ + Values: + STATISTIC_UNDEFINED (0): + Unspecified statistic type + MEAN (1): + Evaluate the column mean + MIN (2): + Evaluate the column min + MAX (3): + Evaluate the column max + """ + STATISTIC_UNDEFINED = 0 + MEAN = 1 + MIN = 2 + MAX = 3 + + statistic: 'DataQualityRule.StatisticRangeExpectation.ColumnStatistic' = proto.Field( + proto.ENUM, + number=1, + enum='DataQualityRule.StatisticRangeExpectation.ColumnStatistic', + ) + min_value: str = proto.Field( + proto.STRING, + number=2, + ) + max_value: str = proto.Field( + proto.STRING, + number=3, + ) + strict_min_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + strict_max_enabled: bool = proto.Field( + proto.BOOL, + number=5, + ) + + class RowConditionExpectation(proto.Message): + r"""Evaluates whether each row passes the specified condition. + + The SQL expression needs to use `GoogleSQL + syntax `__ + and should produce a boolean value per row as the result. + + Example: col1 >= 0 AND col2 < 10 + + Attributes: + sql_expression (str): + Optional. The SQL expression. + """ + + sql_expression: str = proto.Field( + proto.STRING, + number=1, + ) + + class TableConditionExpectation(proto.Message): + r"""Evaluates whether the provided expression is true. + + The SQL expression needs to use `GoogleSQL + syntax `__ + and should produce a scalar boolean result. + + Example: MIN(col1) >= 0 + + Attributes: + sql_expression (str): + Optional. The SQL expression. + """ + + sql_expression: str = proto.Field( + proto.STRING, + number=1, + ) + + class SqlAssertion(proto.Message): + r"""A SQL statement that is evaluated to return rows that match an + invalid state. If any rows are returned, this rule fails. + + The SQL statement must use `GoogleSQL + syntax `__, + and must not contain any semicolons. + + You can use the data reference parameter ``${data()}`` to reference + the source table with all of its precondition filters applied. + Examples of precondition filters include row filters, incremental + data filters, and sampling. For more information, see `Data + reference + parameter `__. + + Example: ``SELECT * FROM ${data()} WHERE price < 0`` + + Attributes: + sql_statement (str): + Optional. The SQL statement.
+ """ + + sql_statement: str = proto.Field( + proto.STRING, + number=1, + ) + + range_expectation: RangeExpectation = proto.Field( + proto.MESSAGE, + number=1, + oneof='rule_type', + message=RangeExpectation, + ) + non_null_expectation: NonNullExpectation = proto.Field( + proto.MESSAGE, + number=2, + oneof='rule_type', + message=NonNullExpectation, + ) + set_expectation: SetExpectation = proto.Field( + proto.MESSAGE, + number=3, + oneof='rule_type', + message=SetExpectation, + ) + regex_expectation: RegexExpectation = proto.Field( + proto.MESSAGE, + number=4, + oneof='rule_type', + message=RegexExpectation, + ) + uniqueness_expectation: UniquenessExpectation = proto.Field( + proto.MESSAGE, + number=100, + oneof='rule_type', + message=UniquenessExpectation, + ) + statistic_range_expectation: StatisticRangeExpectation = proto.Field( + proto.MESSAGE, + number=101, + oneof='rule_type', + message=StatisticRangeExpectation, + ) + row_condition_expectation: RowConditionExpectation = proto.Field( + proto.MESSAGE, + number=200, + oneof='rule_type', + message=RowConditionExpectation, + ) + table_condition_expectation: TableConditionExpectation = proto.Field( + proto.MESSAGE, + number=201, + oneof='rule_type', + message=TableConditionExpectation, + ) + sql_assertion: SqlAssertion = proto.Field( + proto.MESSAGE, + number=202, + oneof='rule_type', + message=SqlAssertion, + ) + column: str = proto.Field( + proto.STRING, + number=500, + ) + ignore_null: bool = proto.Field( + proto.BOOL, + number=501, + ) + dimension: str = proto.Field( + proto.STRING, + number=502, + ) + threshold: float = proto.Field( + proto.DOUBLE, + number=503, + ) + name: str = proto.Field( + proto.STRING, + number=504, + ) + description: str = proto.Field( + proto.STRING, + number=505, + ) + suspended: bool = proto.Field( + proto.BOOL, + number=506, + ) + + +class DataQualityColumnResult(proto.Message): + r"""DataQualityColumnResult provides a more detailed, per-column + view of the results. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + column (str): + Output only. The column specified in the + DataQualityRule. + score (float): + Output only. The column-level data quality score for this + data scan job if and only if the 'column' field is set. + + The score ranges between between [0, 100] (up to two decimal + points). + + This field is a member of `oneof`_ ``_score``. + passed (bool): + Output only. Whether the column passed or + failed. + dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): + Output only. The dimension-level results for + this column. 
+ """ + + column: str = proto.Field( + proto.STRING, + number=1, + ) + score: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + passed: bool = proto.Field( + proto.BOOL, + number=3, + ) + dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='DataQualityDimensionResult', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py new file mode 100644 index 000000000000..bc788d18cd85 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py @@ -0,0 +1,972 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import security +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataTaxonomy', + 'DataAttribute', + 'DataAttributeBinding', + 'CreateDataTaxonomyRequest', + 'UpdateDataTaxonomyRequest', + 'GetDataTaxonomyRequest', + 'ListDataTaxonomiesRequest', + 'ListDataTaxonomiesResponse', + 'DeleteDataTaxonomyRequest', + 'CreateDataAttributeRequest', + 'UpdateDataAttributeRequest', + 'GetDataAttributeRequest', + 'ListDataAttributesRequest', + 'ListDataAttributesResponse', + 'DeleteDataAttributeRequest', + 'CreateDataAttributeBindingRequest', + 'UpdateDataAttributeBindingRequest', + 'GetDataAttributeBindingRequest', + 'ListDataAttributeBindingsRequest', + 'ListDataAttributeBindingsResponse', + 'DeleteDataAttributeBindingRequest', + }, +) + + +class DataTaxonomy(proto.Message): + r"""DataTaxonomy represents a set of hierarchical DataAttributes + resources, grouped with a common theme Eg: + 'SensitiveDataTaxonomy' can have attributes to manage PII data. + It is defined at project level. + + Attributes: + name (str): + Output only. The relative resource name of the DataTaxonomy, + of the form: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}. + uid (str): + Output only. System generated globally unique + ID for the dataTaxonomy. This ID will be + different if the DataTaxonomy is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataTaxonomy + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataTaxonomy + was last updated. + description (str): + Optional. Description of the DataTaxonomy. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + DataTaxonomy. 
+ attribute_count (int): + Output only. The number of attributes in the + DataTaxonomy. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + class_count (int): + Output only. The number of classes in the + DataTaxonomy. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + attribute_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + class_count: int = proto.Field( + proto.INT32, + number=11, + ) + + +class DataAttribute(proto.Message): + r"""Denotes one dataAttribute in a dataTaxonomy, for example, PII. + DataAttribute resources can be defined in a hierarchy. A single + dataAttribute resource can contain specs of multiple types + + :: + + PII + - ResourceAccessSpec : + - readers :foo@bar.com + - DataAccessSpec : + - readers :bar@foo.com + + Attributes: + name (str): + Output only. The relative resource name of the + dataAttribute, of the form: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}. + uid (str): + Output only. System generated globally unique + ID for the DataAttribute. This ID will be + different if the DataAttribute is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataAttribute + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataAttribute + was last updated. + description (str): + Optional. Description of the DataAttribute. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + DataAttribute. + parent_id (str): + Optional. The ID of the parent DataAttribute resource, + should belong to the same data taxonomy. Circular dependency + in parent chain is not valid. Maximum depth of the hierarchy + allowed is 4. [a -> b -> c -> d -> e, depth = 4] + attribute_count (int): + Output only. The number of child attributes + present for this attribute. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + resource_access_spec (google.cloud.dataplex_v1.types.ResourceAccessSpec): + Optional. Specified when applied to a + resource (eg: Cloud Storage bucket, BigQuery + dataset, BigQuery table). + data_access_spec (google.cloud.dataplex_v1.types.DataAccessSpec): + Optional. Specified when applied to data + stored on the resource (eg: rows, columns in + BigQuery Tables). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent_id: str = proto.Field( + proto.STRING, + number=8, + ) + attribute_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + resource_access_spec: security.ResourceAccessSpec = proto.Field( + proto.MESSAGE, + number=100, + message=security.ResourceAccessSpec, + ) + data_access_spec: security.DataAccessSpec = proto.Field( + proto.MESSAGE, + number=101, + message=security.DataAccessSpec, + ) + + +class DataAttributeBinding(proto.Message): + r"""DataAttributeBinding represents binding of attributes to + resources. Eg: Bind 'CustomerInfo' entity with 'PII' attribute. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the Data + Attribute Binding, of the form: + projects/{project_number}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id} + uid (str): + Output only. System generated globally unique + ID for the DataAttributeBinding. This ID will be + different if the DataAttributeBinding is deleted + and re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the + DataAttributeBinding was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the + DataAttributeBinding was last updated. + description (str): + Optional. Description of the + DataAttributeBinding. + display_name (str): + Optional. User friendly display name. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + DataAttributeBinding. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. Etags + must be used when calling the + DeleteDataAttributeBinding and the + UpdateDataAttributeBinding method. + resource (str): + Optional. Immutable. The resource name of the resource that + is associated to attributes. Presently, only entity resource + is supported in the form: + projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity_id} + Must belong in the same project and region as the attribute + binding, and there can only exist one active binding for a + resource. + + This field is a member of `oneof`_ ``resource_reference``. + attributes (MutableSequence[str]): + Optional. List of attributes to be associated with the + resource, provided in the form: + projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + paths (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding.Path]): + Optional. The list of paths for items within + the associated resource (eg. columns and + partitions within a table) along with attribute + bindings. 
+ """ + + class Path(proto.Message): + r"""Represents a subresource of the given resource, and + associated bindings with it. Currently supported subresources + are column and partition schema fields within a table. + + Attributes: + name (str): + Required. The name identifier of the path. + Nested columns should be of the form: + 'address.city'. + attributes (MutableSequence[str]): + Optional. List of attributes to be associated with the path + of the resource, provided in the form: + projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + attributes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + resource: str = proto.Field( + proto.STRING, + number=100, + oneof='resource_reference', + ) + attributes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=110, + ) + paths: MutableSequence[Path] = proto.RepeatedField( + proto.MESSAGE, + number=120, + message=Path, + ) + + +class CreateDataTaxonomyRequest(proto.Message): + r"""Create DataTaxonomy request. + + Attributes: + parent (str): + + data_taxonomy_id (str): + Required. DataTaxonomy identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Project. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. DataTaxonomy resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_taxonomy_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_taxonomy: 'DataTaxonomy' = proto.Field( + proto.MESSAGE, + number=3, + message='DataTaxonomy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataTaxonomyRequest(proto.Message): + r"""Update DataTaxonomy request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_taxonomy: 'DataTaxonomy' = proto.Field( + proto.MESSAGE, + number=2, + message='DataTaxonomy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataTaxonomyRequest(proto.Message): + r"""Get DataTaxonomy request. 
+ + Attributes: + name (str): + + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataTaxonomiesRequest(proto.Message): + r"""List DataTaxonomies request. + + Attributes: + parent (str): + Required. The resource name of the DataTaxonomy location, of + the form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of DataTaxonomies to + return. The service may return fewer than this + value. If unspecified, at most 10 DataTaxonomies + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataTaxonomies`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataTaxonomies`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataTaxonomiesResponse(proto.Message): + r"""List DataTaxonomies response. + + Attributes: + data_taxonomies (MutableSequence[google.cloud.dataplex_v1.types.DataTaxonomy]): + DataTaxonomies under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_taxonomies: MutableSequence['DataTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataTaxonomy', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataTaxonomyRequest(proto.Message): + r"""Delete DataTaxonomy request. + + Attributes: + name (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteDataTaxonomy method returns an ABORTED + error. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataAttributeRequest(proto.Message): + r"""Create DataAttribute request. + + Attributes: + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + data_attribute_id (str): + Required. DataAttribute identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the DataTaxonomy. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. DataAttribute resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false.
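Continuing the sketch one level down: creating an attribute under the hypothetical taxonomy from the previous example; all names remain placeholders.

from google.cloud import dataplex_v1

request = dataplex_v1.CreateDataAttributeRequest(
    # hypothetical parent taxonomy
    parent="projects/my-project/locations/us-central1/dataTaxonomies/sensitive-data",
    data_attribute_id="pii",
    data_attribute=dataplex_v1.DataAttribute(
        display_name="PII",
        description="Columns carrying personally identifiable information.",
    ),
)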
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_attribute_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_attribute: 'DataAttribute' = proto.Field( + proto.MESSAGE, + number=3, + message='DataAttribute', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataAttributeRequest(proto.Message): + r"""Update DataAttribute request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_attribute (google.cloud.dataplex_v1.types.DataAttribute): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_attribute: 'DataAttribute' = proto.Field( + proto.MESSAGE, + number=2, + message='DataAttribute', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataAttributeRequest(proto.Message): + r"""Get DataAttribute request. + + Attributes: + name (str): + Required. The resource name of the dataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataAttributesRequest(proto.Message): + r"""List DataAttributes request. + + Attributes: + parent (str): + Required. The resource name of the DataTaxonomy: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} + page_size (int): + Optional. Maximum number of DataAttributes to + return. The service may return fewer than this + value. If unspecified, at most 10 dataAttributes + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataAttributes`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataAttributes`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataAttributesResponse(proto.Message): + r"""List DataAttributes response. + + Attributes: + data_attributes (MutableSequence[google.cloud.dataplex_v1.types.DataAttribute]): + DataAttributes under the given parent + DataTaxonomy. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_attributes: MutableSequence['DataAttribute'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataAttribute', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataAttributeRequest(proto.Message): + r"""Delete DataAttribute request. 
+ + Attributes: + name (str): + Required. The resource name of the DataAttribute: + projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} + etag (str): + Optional. If the client provided etag value + does not match the current etag value, the + DeleteDataAttribute method returns an ABORTED + error response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataAttributeBindingRequest(proto.Message): + r"""Create DataAttributeBinding request. + + Attributes: + parent (str): + Required. The resource name of the parent data taxonomy + projects/{project_number}/locations/{location_id} + data_attribute_binding_id (str): + Required. DataAttributeBinding identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the Location. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. DataAttributeBinding resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_attribute_binding_id: str = proto.Field( + proto.STRING, + number=2, + ) + data_attribute_binding: 'DataAttributeBinding' = proto.Field( + proto.MESSAGE, + number=3, + message='DataAttributeBinding', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataAttributeBindingRequest(proto.Message): + r"""Update DataAttributeBinding request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): + Required. Only fields specified in ``update_mask`` are + updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_attribute_binding: 'DataAttributeBinding' = proto.Field( + proto.MESSAGE, + number=2, + message='DataAttributeBinding', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class GetDataAttributeBindingRequest(proto.Message): + r"""Get DataAttributeBinding request. + + Attributes: + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDataAttributeBindingsRequest(proto.Message): + r"""List DataAttributeBindings request. + + Attributes: + parent (str): + Required. The resource name of the Location: + projects/{project_number}/locations/{location_id} + page_size (int): + Optional. Maximum number of + DataAttributeBindings to return. The service may + return fewer than this value. If unspecified, at + most 10 DataAttributeBindings will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataAttributeBindings`` call. Provide this to retrieve + the subsequent page. 
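A sketch of binding that attribute to a resource and to a nested column path, per the DataAttributeBinding message above; every resource name here is hypothetical.

from google.cloud import dataplex_v1

PII_ATTR = ("projects/my-project/locations/us-central1/dataTaxonomies"
            "/sensitive-data/attributes/pii")  # hypothetical attribute

binding = dataplex_v1.DataAttributeBinding(
    # hypothetical lake entity
    resource=("projects/my-project/locations/us-central1/lakes/my-lake"
              "/zones/raw/entities/customers"),
    attributes=[PII_ATTR],
    paths=[
        dataplex_v1.DataAttributeBinding.Path(
            name="address.city",  # nested column, as documented above
            attributes=[PII_ATTR],
        ),
    ],
)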
When paginating, all other parameters + provided to ``ListDataAttributeBindings`` must match the + call that provided the page token. + filter (str): + Optional. Filter request. + Filter using resource: + filter=resource:"resource-name" Filter using + attribute: filter=attributes:"attribute-name" + Filter using attribute in paths list: + + filter=paths.attributes:"attribute-name". + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataAttributeBindingsResponse(proto.Message): + r"""List DataAttributeBindings response. + + Attributes: + data_attribute_bindings (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding]): + DataAttributeBindings under the given parent + Location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_attribute_bindings: MutableSequence['DataAttributeBinding'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataAttributeBinding', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteDataAttributeBindingRequest(proto.Message): + r"""Delete DataAttributeBinding request. + + Attributes: + name (str): + Required. The resource name of the DataAttributeBinding: + projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} + etag (str): + Required. If the client provided etag value + does not match the current etag value, the + DeleteDataAttributeBindingRequest method returns + an ABORTED error response. Etags must be used + when calling the DeleteDataAttributeBinding. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py new file mode 100644 index 000000000000..4994b1db31a0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py @@ -0,0 +1,931 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import data_discovery +from google.cloud.dataplex_v1.types import data_profile +from google.cloud.dataplex_v1.types import data_quality +from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import resources +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataScanType', + 'CreateDataScanRequest', + 'UpdateDataScanRequest', + 'DeleteDataScanRequest', + 'GetDataScanRequest', + 'ListDataScansRequest', + 'ListDataScansResponse', + 'RunDataScanRequest', + 'RunDataScanResponse', + 'GetDataScanJobRequest', + 'ListDataScanJobsRequest', + 'ListDataScanJobsResponse', + 'GenerateDataQualityRulesRequest', + 'GenerateDataQualityRulesResponse', + 'DataScan', + 'DataScanJob', + }, +) + + +class DataScanType(proto.Enum): + r"""The type of data scan. + + Values: + DATA_SCAN_TYPE_UNSPECIFIED (0): + The data scan type is unspecified. + DATA_QUALITY (1): + Data quality scan. + DATA_PROFILE (2): + Data profile scan. + DATA_DISCOVERY (3): + Data discovery scan. + """ + DATA_SCAN_TYPE_UNSPECIFIED = 0 + DATA_QUALITY = 1 + DATA_PROFILE = 2 + DATA_DISCOVERY = 3 + + +class CreateDataScanRequest(proto.Message): + r"""Create dataScan request. + + Attributes: + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* and + ``location_id`` refers to a Google Cloud region. + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource. + data_scan_id (str): + Required. DataScan identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + validate_only (bool): + Optional. Only validate the request, but do not perform + mutations. The default is ``false``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_scan: 'DataScan' = proto.Field( + proto.MESSAGE, + number=2, + message='DataScan', + ) + data_scan_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateDataScanRequest(proto.Message): + r"""Update dataScan request. + + Attributes: + data_scan (google.cloud.dataplex_v1.types.DataScan): + Required. DataScan resource to be updated. + + Only fields specified in ``update_mask`` are updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Mask of fields to update. + validate_only (bool): + Optional. Only validate the request, but do not perform + mutations. The default is ``false``. + """ + + data_scan: 'DataScan' = proto.Field( + proto.MESSAGE, + number=1, + message='DataScan', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteDataScanRequest(proto.Message): + r"""Delete dataScan request. + + Attributes: + name (str): + Required. 
The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + force (bool): + Optional. If set to true, any child resources + of this data scan will also be deleted. + (Otherwise, the request will only work if the + data scan has no child resources.) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class GetDataScanRequest(proto.Message): + r"""Get dataScan request. + + Attributes: + name (str): + Required. The resource name of the dataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): + Optional. Select the DataScan view to return. Defaults to + ``BASIC``. + """ + class DataScanView(proto.Enum): + r"""DataScan view options. + + Values: + DATA_SCAN_VIEW_UNSPECIFIED (0): + The API will default to the ``BASIC`` view. + BASIC (1): + Basic view that does not include *spec* and *result*. + FULL (10): + Include everything. + """ + DATA_SCAN_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: DataScanView = proto.Field( + proto.ENUM, + number=2, + enum=DataScanView, + ) + + +class ListDataScansRequest(proto.Message): + r"""List dataScans request. + + Attributes: + parent (str): + Required. The resource name of the parent location: + ``projects/{project}/locations/{location_id}`` where + ``project`` refers to a *project_id* or *project_number* and + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of dataScans to + return. The service may return fewer than this + value. If unspecified, at most 500 scans will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataScans`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataScans`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields (``name`` or ``create_time``) for + the result. If not specified, the ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataScansResponse(proto.Message): + r"""List dataScans response. + + Attributes: + data_scans (MutableSequence[google.cloud.dataplex_v1.types.DataScan]): + DataScans (``BASIC`` view only) under the given parent + location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
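+
+    Example (an illustrative sketch, not part of the generated
+    surface; the project and location below are placeholder values
+    and a default-configured ``DataScanServiceClient`` is assumed)::
+
+        from google.cloud import dataplex_v1
+
+        client = dataplex_v1.DataScanServiceClient()
+        request = dataplex_v1.ListDataScansRequest(
+            parent="projects/my-project/locations/us-central1",
+        )
+        # The returned pager follows next_page_token transparently,
+        # issuing further ListDataScans calls as pages are consumed.
+        for scan in client.list_data_scans(request=request):
+            print(scan.name)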
+ """ + + @property + def raw_page(self): + return self + + data_scans: MutableSequence['DataScan'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataScan', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class RunDataScanRequest(proto.Message): + r"""Run DataScan Request + + Attributes: + name (str): + Required. The resource name of the DataScan: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + + Only **OnDemand** data scans are allowed. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunDataScanResponse(proto.Message): + r"""Run DataScan Response. + + Attributes: + job (google.cloud.dataplex_v1.types.DataScanJob): + DataScanJob created by RunDataScan request. + """ + + job: 'DataScanJob' = proto.Field( + proto.MESSAGE, + number=1, + message='DataScanJob', + ) + + +class GetDataScanJobRequest(proto.Message): + r"""Get DataScanJob request. + + Attributes: + name (str): + Required. The resource name of the DataScanJob: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView): + Optional. Select the DataScanJob view to return. Defaults to + ``BASIC``. + """ + class DataScanJobView(proto.Enum): + r"""DataScanJob view options. + + Values: + DATA_SCAN_JOB_VIEW_UNSPECIFIED (0): + The API will default to the ``BASIC`` view. + BASIC (1): + Basic view that does not include *spec* and *result*. + FULL (10): + Include everything. + """ + DATA_SCAN_JOB_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: DataScanJobView = proto.Field( + proto.ENUM, + number=2, + enum=DataScanJobView, + ) + + +class ListDataScanJobsRequest(proto.Message): + r"""List DataScanJobs request. + + Attributes: + parent (str): + Required. The resource name of the parent environment: + ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + page_size (int): + Optional. Maximum number of DataScanJobs to + return. The service may return fewer than this + value. If unspecified, at most 10 DataScanJobs + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListDataScanJobs`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataScanJobs`` must match the call that + provided the page token. + filter (str): + Optional. An expression for filtering the results of the + ListDataScanJobs request. + + If unspecified, all datascan jobs will be returned. Multiple + filters can be applied (with ``AND``, ``OR`` logical + operators). Filters are case-sensitive. + + Allowed fields are: + + - ``start_time`` + - ``end_time`` + + ``start_time`` and ``end_time`` expect RFC-3339 formatted + strings (e.g. 2018-10-08T18:30:00-07:00). 
+ + For instance, 'start_time > 2018-10-08T00:00:00.123456789Z + AND end_time < 2018-10-09T00:00:00.123456789Z' limits + results to DataScanJobs between specified start and end + times. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDataScanJobsResponse(proto.Message): + r"""List DataScanJobs response. + + Attributes: + data_scan_jobs (MutableSequence[google.cloud.dataplex_v1.types.DataScanJob]): + DataScanJobs (``BASIC`` view only) under a given dataScan. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + data_scan_jobs: MutableSequence['DataScanJob'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataScanJob', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GenerateDataQualityRulesRequest(proto.Message): + r"""Request details for generating data quality rule + recommendations. + + Attributes: + name (str): + Required. The name must be one of the following: + + - The name of a data scan with at least one successful, + completed data profiling job + - The name of a successful, completed data profiling job (a + data scan job where the job type is data profiling) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GenerateDataQualityRulesResponse(proto.Message): + r"""Response details for data quality rule recommendations. + + Attributes: + rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): + The data quality rules that Dataplex + Universal Catalog generates based on the results + of a data profiling scan. + """ + + rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data_quality.DataQualityRule, + ) + + +class DataScan(proto.Message): + r"""Represents a user-visible job which provides the insights for the + related data source. + + For example: + + - Data quality: generates queries based on the rules and runs + against the data to get data quality check results. For more + information, see the *Auto data quality overview* page. + - Data profile: analyzes the data in tables and generates insights + about the structure, content and relationships (such as null + percent, cardinality, min/max/mean, etc.). For more information, + see the *About data profiling* page. + - Data discovery: scans data in Cloud Storage buckets to extract and + then catalog metadata. For more information, see the *Discover and + catalog Cloud Storage data* page. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The relative resource name of the + scan, of the form: + ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + uid (str): + Output only. System generated globally unique + ID for the scan.
This ID will be different if + the scan is deleted and re-created with the same + name. + description (str): + Optional. Description of the scan. + + - Must be between 1-1024 characters. + display_name (str): + Optional. User friendly display name. + + - Must be between 1-256 characters. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the scan. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the DataScan. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the scan was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the scan was last + updated. + data (google.cloud.dataplex_v1.types.DataSource): + Required. The data source for DataScan. + execution_spec (google.cloud.dataplex_v1.types.DataScan.ExecutionSpec): + Optional. DataScan execution settings. + + If not specified, the fields in it will use + their default values. + execution_status (google.cloud.dataplex_v1.types.DataScan.ExecutionStatus): + Output only. Status of the data scan + execution. + type_ (google.cloud.dataplex_v1.types.DataScanType): + Output only. The type of DataScan. + data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): + Settings for a data quality scan. + + This field is a member of `oneof`_ ``spec``. + data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): + Settings for a data profile scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Settings for a data discovery scan. + + This field is a member of `oneof`_ ``spec``. + data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): + Output only. The result of a data quality + scan. + + This field is a member of `oneof`_ ``result``. + data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery + scan. + + This field is a member of `oneof`_ ``result``. + """ + + class ExecutionSpec(proto.Message): + r"""DataScan execution settings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + trigger (google.cloud.dataplex_v1.types.Trigger): + Optional. Spec related to how often and when a scan should + be triggered. + + If not specified, the default is ``OnDemand``, which means + the scan will not run until the user calls ``RunDataScan`` + API. + field (str): + Immutable. The unnested field (of type *Date* or + *Timestamp*) that contains values which monotonically + increase over time. + + If not specified, a data scan will run for all data in the + table. + + This field is a member of `oneof`_ ``incremental``. + """ + + trigger: processing.Trigger = proto.Field( + proto.MESSAGE, + number=1, + message=processing.Trigger, + ) + field: str = proto.Field( + proto.STRING, + number=100, + oneof='incremental', + ) + + class ExecutionStatus(proto.Message): + r"""Status of the data scan execution. + + Attributes: + latest_job_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the latest + DataScanJob started. + latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the latest + DataScanJob ended. 
+ latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the DataScanJob + execution was created. + """ + + latest_job_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + latest_job_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + latest_job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=6, + enum=resources.State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + data: processing.DataSource = proto.Field( + proto.MESSAGE, + number=9, + message=processing.DataSource, + ) + execution_spec: ExecutionSpec = proto.Field( + proto.MESSAGE, + number=10, + message=ExecutionSpec, + ) + execution_status: ExecutionStatus = proto.Field( + proto.MESSAGE, + number=11, + message=ExecutionStatus, + ) + type_: 'DataScanType' = proto.Field( + proto.ENUM, + number=12, + enum='DataScanType', + ) + data_quality_spec: data_quality.DataQualitySpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=data_quality.DataQualitySpec, + ) + data_profile_spec: data_profile.DataProfileSpec = proto.Field( + proto.MESSAGE, + number=101, + oneof='spec', + message=data_profile.DataProfileSpec, + ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof='spec', + message=data_discovery.DataDiscoverySpec, + ) + data_quality_result: data_quality.DataQualityResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=data_quality.DataQualityResult, + ) + data_profile_result: data_profile.DataProfileResult = proto.Field( + proto.MESSAGE, + number=201, + oneof='result', + message=data_profile.DataProfileResult, + ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof='result', + message=data_discovery.DataDiscoveryResult, + ) + + +class DataScanJob(proto.Message): + r"""A DataScanJob represents an instance of DataScan execution. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The relative resource name of the + DataScanJob, of the form: + ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, + where ``project`` refers to a *project_id* or + *project_number* and ``location_id`` refers to a Google + Cloud region. + uid (str): + Output only. System generated globally unique + ID for the DataScanJob. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time when the DataScanJob + was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + was started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + ended. + state (google.cloud.dataplex_v1.types.DataScanJob.State): + Output only. Execution state for the + DataScanJob. + message (str): + Output only. Additional information about the + current state. + type_ (google.cloud.dataplex_v1.types.DataScanType): + Output only. The type of the parent DataScan. + data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): + Output only. Settings for a data quality + scan. + + This field is a member of `oneof`_ ``spec``. + data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): + Output only. Settings for a data profile + scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Output only. Settings for a data discovery + scan. + + This field is a member of `oneof`_ ``spec``. + data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): + Output only. The result of a data quality + scan. + + This field is a member of `oneof`_ ``result``. + data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery + scan. + + This field is a member of `oneof`_ ``result``. + """ + class State(proto.Enum): + r"""Execution state for the DataScanJob. + + Values: + STATE_UNSPECIFIED (0): + The DataScanJob state is unspecified. + RUNNING (1): + The DataScanJob is running. + CANCELING (2): + The DataScanJob is canceling. + CANCELLED (3): + The DataScanJob cancellation was successful. + SUCCEEDED (4): + The DataScanJob completed successfully. + FAILED (5): + The DataScanJob is no longer running due to + an error. + PENDING (7): + The DataScanJob has been created but not + started to run yet. 
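+
+        Example of reading the state (an illustrative sketch; the
+        job name below is a placeholder)::
+
+            from google.cloud import dataplex_v1
+
+            client = dataplex_v1.DataScanServiceClient()
+            job = client.get_data_scan_job(
+                name="projects/my-project/locations/us-central1/dataScans/my-scan/jobs/my-job",
+            )
+            # PENDING and RUNNING indicate the job has not finished yet.
+            if job.state == dataplex_v1.DataScanJob.State.SUCCEEDED:
+                print("finished at", job.end_time)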
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + CANCELING = 2 + CANCELLED = 3 + SUCCEEDED = 4 + FAILED = 5 + PENDING = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + message: str = proto.Field( + proto.STRING, + number=6, + ) + type_: 'DataScanType' = proto.Field( + proto.ENUM, + number=7, + enum='DataScanType', + ) + data_quality_spec: data_quality.DataQualitySpec = proto.Field( + proto.MESSAGE, + number=100, + oneof='spec', + message=data_quality.DataQualitySpec, + ) + data_profile_spec: data_profile.DataProfileSpec = proto.Field( + proto.MESSAGE, + number=101, + oneof='spec', + message=data_profile.DataProfileSpec, + ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof='spec', + message=data_discovery.DataDiscoverySpec, + ) + data_quality_result: data_quality.DataQualityResult = proto.Field( + proto.MESSAGE, + number=200, + oneof='result', + message=data_quality.DataQualityResult, + ) + data_profile_result: data_profile.DataProfileResult = proto.Field( + proto.MESSAGE, + number=201, + oneof='result', + message=data_profile.DataProfileResult, + ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof='result', + message=data_discovery.DataDiscoveryResult, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py new file mode 100644 index 000000000000..a23567bed152 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DataScanCatalogPublishingStatus', + }, +) + + +class DataScanCatalogPublishingStatus(proto.Message): + r"""The status of publishing the data scan result as Dataplex + Universal Catalog metadata. + + Attributes: + state (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus.State): + Output only. Execution state for catalog + publishing. + """ + class State(proto.Enum): + r"""Execution state for the publishing. 
+ + Values: + STATE_UNSPECIFIED (0): + The publishing state is unspecified. + SUCCEEDED (1): + Publish to catalog completed successfully. + FAILED (2): + Publish to catalog failed. + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py new file mode 100644 index 000000000000..4eccf1917483 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py @@ -0,0 +1,1467 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import datascans_common +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'DiscoveryEvent', + 'JobEvent', + 'SessionEvent', + 'GovernanceEvent', + 'DataScanEvent', + 'DataQualityScanRuleResult', + 'BusinessGlossaryEvent', + 'EntryLinkEvent', + }, +) + + +class DiscoveryEvent(proto.Message): + r"""The payload associated with Discovery data processing. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (str): + The log message. + lake_id (str): + The id of the associated lake. + zone_id (str): + The id of the associated zone. + asset_id (str): + The id of the associated asset. + data_location (str): + The data location associated with the event. + datascan_id (str): + The id of the associated datascan for + standalone discovery. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType): + The type of the event being logged. + config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails): + Details about discovery configuration in + effect. + + This field is a member of `oneof`_ ``details``. + entity (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityDetails): + Details about the entity associated with the + event. + + This field is a member of `oneof`_ ``details``. + partition (google.cloud.dataplex_v1.types.DiscoveryEvent.PartitionDetails): + Details about the partition associated with + the event. + + This field is a member of `oneof`_ ``details``. + action (google.cloud.dataplex_v1.types.DiscoveryEvent.ActionDetails): + Details about the action associated with the + event. + + This field is a member of `oneof`_ ``details``. 
+ table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails): + Details about the BigQuery table publishing + associated with the event. + + This field is a member of `oneof`_ ``details``. + """ + class EventType(proto.Enum): + r"""The type of the event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + CONFIG (1): + An event representing discovery configuration + in effect. + ENTITY_CREATED (2): + An event representing a metadata entity being + created. + ENTITY_UPDATED (3): + An event representing a metadata entity being + updated. + ENTITY_DELETED (4): + An event representing a metadata entity being + deleted. + PARTITION_CREATED (5): + An event representing a partition being + created. + PARTITION_UPDATED (6): + An event representing a partition being + updated. + PARTITION_DELETED (7): + An event representing a partition being + deleted. + TABLE_PUBLISHED (10): + An event representing a table being + published. + TABLE_UPDATED (11): + An event representing a table being updated. + TABLE_IGNORED (12): + An event representing a table being skipped + in publishing. + TABLE_DELETED (13): + An event representing a table being deleted. + """ + EVENT_TYPE_UNSPECIFIED = 0 + CONFIG = 1 + ENTITY_CREATED = 2 + ENTITY_UPDATED = 3 + ENTITY_DELETED = 4 + PARTITION_CREATED = 5 + PARTITION_UPDATED = 6 + PARTITION_DELETED = 7 + TABLE_PUBLISHED = 10 + TABLE_UPDATED = 11 + TABLE_IGNORED = 12 + TABLE_DELETED = 13 + + class EntityType(proto.Enum): + r"""The type of the entity. + + Values: + ENTITY_TYPE_UNSPECIFIED (0): + An unspecified event type. + TABLE (1): + Entities representing structured data. + FILESET (2): + Entities representing unstructured data. + """ + ENTITY_TYPE_UNSPECIFIED = 0 + TABLE = 1 + FILESET = 2 + + class TableType(proto.Enum): + r"""The type of the published table. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + An unspecified table type. + EXTERNAL_TABLE (1): + External table type. + BIGLAKE_TABLE (2): + BigLake table type. + OBJECT_TABLE (3): + Object table type for unstructured data. + """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL_TABLE = 1 + BIGLAKE_TABLE = 2 + OBJECT_TABLE = 3 + + class ConfigDetails(proto.Message): + r"""Details about configuration events. + + Attributes: + parameters (MutableMapping[str, str]): + A list of discovery configuration parameters + in effect. The keys are the field paths within + DiscoverySpec. Eg. includePatterns, + excludePatterns, + csvOptions.disableTypeInference, etc. + """ + + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + class EntityDetails(proto.Message): + r"""Details about the entity. + + Attributes: + entity (str): + The name of the entity resource. + The name is the fully-qualified resource name. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): + The type of the entity resource. + """ + + entity: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DiscoveryEvent.EntityType' = proto.Field( + proto.ENUM, + number=2, + enum='DiscoveryEvent.EntityType', + ) + + class TableDetails(proto.Message): + r"""Details about the published table. + + Attributes: + table (str): + The fully-qualified resource name of the + table resource. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType): + The type of the table resource. 
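+
+        Illustrative payload shape (a sketch; in practice these
+        payloads are emitted by the Discovery service rather than
+        built by hand, and the table name below is a placeholder)::
+
+            from google.cloud import dataplex_v1
+
+            details = dataplex_v1.DiscoveryEvent.TableDetails(
+                table="projects/my-project/datasets/my_dataset/tables/my_table",
+                type_=dataplex_v1.DiscoveryEvent.TableType.BIGLAKE_TABLE,
+            )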
+ """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DiscoveryEvent.TableType' = proto.Field( + proto.ENUM, + number=2, + enum='DiscoveryEvent.TableType', + ) + + class PartitionDetails(proto.Message): + r"""Details about the partition. + + Attributes: + partition (str): + The name to the partition resource. + The name is the fully-qualified resource name. + entity (str): + The name to the containing entity resource. + The name is the fully-qualified resource name. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): + The type of the containing entity resource. + sampled_data_locations (MutableSequence[str]): + The locations of the data items (e.g., a + Cloud Storage objects) sampled for metadata + inference. + """ + + partition: str = proto.Field( + proto.STRING, + number=1, + ) + entity: str = proto.Field( + proto.STRING, + number=2, + ) + type_: 'DiscoveryEvent.EntityType' = proto.Field( + proto.ENUM, + number=3, + enum='DiscoveryEvent.EntityType', + ) + sampled_data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ActionDetails(proto.Message): + r"""Details about the action. + + Attributes: + type_ (str): + The type of action. + Eg. IncompatibleDataSchema, InvalidDataFormat + issue (str): + The human readable issue associated with the + action. + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + lake_id: str = proto.Field( + proto.STRING, + number=2, + ) + zone_id: str = proto.Field( + proto.STRING, + number=3, + ) + asset_id: str = proto.Field( + proto.STRING, + number=4, + ) + data_location: str = proto.Field( + proto.STRING, + number=5, + ) + datascan_id: str = proto.Field( + proto.STRING, + number=6, + ) + type_: EventType = proto.Field( + proto.ENUM, + number=10, + enum=EventType, + ) + config: ConfigDetails = proto.Field( + proto.MESSAGE, + number=20, + oneof='details', + message=ConfigDetails, + ) + entity: EntityDetails = proto.Field( + proto.MESSAGE, + number=21, + oneof='details', + message=EntityDetails, + ) + partition: PartitionDetails = proto.Field( + proto.MESSAGE, + number=22, + oneof='details', + message=PartitionDetails, + ) + action: ActionDetails = proto.Field( + proto.MESSAGE, + number=23, + oneof='details', + message=ActionDetails, + ) + table: TableDetails = proto.Field( + proto.MESSAGE, + number=24, + oneof='details', + message=TableDetails, + ) + + +class JobEvent(proto.Message): + r"""The payload associated with Job logs that contains events + describing jobs that have run within a Lake. + + Attributes: + message (str): + The log message. + job_id (str): + The unique id identifying the job. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the job started running. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the job ended running. + state (google.cloud.dataplex_v1.types.JobEvent.State): + The job state on completion. + retries (int): + The number of retries. + type_ (google.cloud.dataplex_v1.types.JobEvent.Type): + The type of the job. + service (google.cloud.dataplex_v1.types.JobEvent.Service): + The service used to execute the job. + service_job (str): + The reference to the job within the service. + execution_trigger (google.cloud.dataplex_v1.types.JobEvent.ExecutionTrigger): + Job execution trigger. + """ + class Type(proto.Enum): + r"""The type of the job. 
+ + Values: + TYPE_UNSPECIFIED (0): + Unspecified job type. + SPARK (1): + Spark jobs. + NOTEBOOK (2): + Notebook jobs. + """ + TYPE_UNSPECIFIED = 0 + SPARK = 1 + NOTEBOOK = 2 + + class State(proto.Enum): + r"""The completion status of the job. + + Values: + STATE_UNSPECIFIED (0): + Unspecified job state. + SUCCEEDED (1): + Job successfully completed. + FAILED (2): + Job was unsuccessful. + CANCELLED (3): + Job was cancelled by the user. + ABORTED (4): + Job was cancelled or aborted via the service + executing the job. + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + CANCELLED = 3 + ABORTED = 4 + + class Service(proto.Enum): + r"""The service used to execute the job. + + Values: + SERVICE_UNSPECIFIED (0): + Unspecified service. + DATAPROC (1): + Cloud Dataproc. + """ + SERVICE_UNSPECIFIED = 0 + DATAPROC = 1 + + class ExecutionTrigger(proto.Enum): + r"""Job Execution trigger. + + Values: + EXECUTION_TRIGGER_UNSPECIFIED (0): + The job execution trigger is unspecified. + TASK_CONFIG (1): + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. + RUN_REQUEST (2): + The job was triggered by an explicit call of + the Task API. + """ + EXECUTION_TRIGGER_UNSPECIFIED = 0 + TASK_CONFIG = 1 + RUN_REQUEST = 2 + + message: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + retries: int = proto.Field( + proto.INT32, + number=6, + ) + type_: Type = proto.Field( + proto.ENUM, + number=7, + enum=Type, + ) + service: Service = proto.Field( + proto.ENUM, + number=8, + enum=Service, + ) + service_job: str = proto.Field( + proto.STRING, + number=9, + ) + execution_trigger: ExecutionTrigger = proto.Field( + proto.ENUM, + number=11, + enum=ExecutionTrigger, + ) + + +class SessionEvent(proto.Message): + r"""These messages contain information about sessions within an + environment. The monitored resource is 'Environment'. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (str): + The log message. + user_id (str): + The information about the user that created + the session. It will be the email address of the + user. + session_id (str): + Unique identifier for the session. + type_ (google.cloud.dataplex_v1.types.SessionEvent.EventType): + The type of the event. + query (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail): + The execution details of the query. + + This field is a member of `oneof`_ ``detail``. + event_succeeded (bool): + The status of the event. + fast_startup_enabled (bool): + Whether the session is associated with an + environment with fast startup enabled and was + created before being assigned to a user. + unassigned_duration (google.protobuf.duration_pb2.Duration): + The idle duration of a warm pooled session + before it is assigned to a user. + """ + class EventType(proto.Enum): + r"""The type of the event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + START (1): + Event when the session is assigned to a user. + STOP (2): + Event for stop of a session. + QUERY (3): + Query events in the session. + CREATE (4): + Event for creation of a cluster.
It is not + yet assigned to a user. This comes before START + in the sequence. + """ + EVENT_TYPE_UNSPECIFIED = 0 + START = 1 + STOP = 2 + QUERY = 3 + CREATE = 4 + + class QueryDetail(proto.Message): + r"""Execution details of the query. + + Attributes: + query_id (str): + The unique Query id identifying the query. + query_text (str): + The query text executed. + engine (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail.Engine): + Query Execution engine. + duration (google.protobuf.duration_pb2.Duration): + Time taken for execution of the query. + result_size_bytes (int): + The size of results the query produced. + data_processed_bytes (int): + The data processed by the query. + """ + class Engine(proto.Enum): + r"""Query Execution engine. + + Values: + ENGINE_UNSPECIFIED (0): + An unspecified Engine type. + SPARK_SQL (1): + Spark-sql engine is specified in Query. + BIGQUERY (2): + BigQuery engine is specified in Query. + """ + ENGINE_UNSPECIFIED = 0 + SPARK_SQL = 1 + BIGQUERY = 2 + + query_id: str = proto.Field( + proto.STRING, + number=1, + ) + query_text: str = proto.Field( + proto.STRING, + number=2, + ) + engine: 'SessionEvent.QueryDetail.Engine' = proto.Field( + proto.ENUM, + number=3, + enum='SessionEvent.QueryDetail.Engine', + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + result_size_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + data_processed_bytes: int = proto.Field( + proto.INT64, + number=6, + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: str = proto.Field( + proto.STRING, + number=2, + ) + session_id: str = proto.Field( + proto.STRING, + number=3, + ) + type_: EventType = proto.Field( + proto.ENUM, + number=4, + enum=EventType, + ) + query: QueryDetail = proto.Field( + proto.MESSAGE, + number=5, + oneof='detail', + message=QueryDetail, + ) + event_succeeded: bool = proto.Field( + proto.BOOL, + number=6, + ) + fast_startup_enabled: bool = proto.Field( + proto.BOOL, + number=7, + ) + unassigned_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + + +class GovernanceEvent(proto.Message): + r"""Payload associated with Governance related log events. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (str): + The log message. + event_type (google.cloud.dataplex_v1.types.GovernanceEvent.EventType): + The type of the event. + entity (google.cloud.dataplex_v1.types.GovernanceEvent.Entity): + Entity resource information if the log event + is associated with a specific entity. + + This field is a member of `oneof`_ ``_entity``. + """ + class EventType(proto.Enum): + r"""Type of governance log event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + RESOURCE_IAM_POLICY_UPDATE (1): + Resource IAM policy update event. + BIGQUERY_TABLE_CREATE (2): + BigQuery table create event. + BIGQUERY_TABLE_UPDATE (3): + BigQuery table update event. + BIGQUERY_TABLE_DELETE (4): + BigQuery table delete event. + BIGQUERY_CONNECTION_CREATE (5): + BigQuery connection create event. + BIGQUERY_CONNECTION_UPDATE (6): + BigQuery connection update event. + BIGQUERY_CONNECTION_DELETE (7): + BigQuery connection delete event. + BIGQUERY_TAXONOMY_CREATE (10): + BigQuery taxonomy created. + BIGQUERY_POLICY_TAG_CREATE (11): + BigQuery policy tag created. + BIGQUERY_POLICY_TAG_DELETE (12): + BigQuery policy tag deleted.
BIGQUERY_POLICY_TAG_SET_IAM_POLICY (13): + BigQuery set IAM policy for policy tag. + ACCESS_POLICY_UPDATE (14): + Access policy update event. + GOVERNANCE_RULE_MATCHED_RESOURCES (15): + Number of resources matched with a particular + query. + GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS (16): + Rule processing exceeds the allowed limit. + GOVERNANCE_RULE_ERRORS (17): + Rule processing errors. + GOVERNANCE_RULE_PROCESSING (18): + Governance rule processing event. + """ + EVENT_TYPE_UNSPECIFIED = 0 + RESOURCE_IAM_POLICY_UPDATE = 1 + BIGQUERY_TABLE_CREATE = 2 + BIGQUERY_TABLE_UPDATE = 3 + BIGQUERY_TABLE_DELETE = 4 + BIGQUERY_CONNECTION_CREATE = 5 + BIGQUERY_CONNECTION_UPDATE = 6 + BIGQUERY_CONNECTION_DELETE = 7 + BIGQUERY_TAXONOMY_CREATE = 10 + BIGQUERY_POLICY_TAG_CREATE = 11 + BIGQUERY_POLICY_TAG_DELETE = 12 + BIGQUERY_POLICY_TAG_SET_IAM_POLICY = 13 + ACCESS_POLICY_UPDATE = 14 + GOVERNANCE_RULE_MATCHED_RESOURCES = 15 + GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS = 16 + GOVERNANCE_RULE_ERRORS = 17 + GOVERNANCE_RULE_PROCESSING = 18 + + class Entity(proto.Message): + r"""Information about Entity resource that the log event is + associated with. + + Attributes: + entity (str): + The Entity resource the log event is associated with. + Format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}`` + entity_type (google.cloud.dataplex_v1.types.GovernanceEvent.Entity.EntityType): + Type of entity. + """ + class EntityType(proto.Enum): + r"""Type of entity. + + Values: + ENTITY_TYPE_UNSPECIFIED (0): + An unspecified Entity type. + TABLE (1): + Table entity type. + FILESET (2): + Fileset entity type. + """ + ENTITY_TYPE_UNSPECIFIED = 0 + TABLE = 1 + FILESET = 2 + + entity: str = proto.Field( + proto.STRING, + number=1, + ) + entity_type: 'GovernanceEvent.Entity.EntityType' = proto.Field( + proto.ENUM, + number=2, + enum='GovernanceEvent.Entity.EntityType', + ) + + message: str = proto.Field( + proto.STRING, + number=1, + ) + event_type: EventType = proto.Field( + proto.ENUM, + number=2, + enum=EventType, + ) + entity: Entity = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=Entity, + ) + + +class DataScanEvent(proto.Message): + r"""These messages contain information about the execution of a + datascan. The monitored resource is 'DataScan'. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_source (str): + The data source of the data scan. + job_id (str): + The identifier of the specific data scan job + this log entry is for. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job started to + run. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the data scan job finished. + type_ (google.cloud.dataplex_v1.types.DataScanEvent.ScanType): + The type of the data scan. + state (google.cloud.dataplex_v1.types.DataScanEvent.State): + The status of the data scan job. + message (str): + The message describing the data scan job + event. + spec_version (str): + A version identifier of the spec which was + used to execute this job.
+ trigger (google.cloud.dataplex_v1.types.DataScanEvent.Trigger): + The trigger type of the data scan job. + scope (google.cloud.dataplex_v1.types.DataScanEvent.Scope): + The scope of the data scan (e.g. full, + incremental). + data_profile (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileResult): + Data profile result for data profile type + data scan. + + This field is a member of `oneof`_ ``result``. + data_quality (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityResult): + Data quality result for data quality type + data scan. + + This field is a member of `oneof`_ ``result``. + data_profile_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileAppliedConfigs): + Applied configs for data profile type data + scan. + + This field is a member of `oneof`_ ``appliedConfigs``. + data_quality_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityAppliedConfigs): + Applied configs for data quality type data + scan. + + This field is a member of `oneof`_ ``appliedConfigs``. + post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult): + The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + The status of publishing the data scan as + Dataplex Universal Catalog metadata. + """ + class ScanType(proto.Enum): + r"""The type of the data scan. + + Values: + SCAN_TYPE_UNSPECIFIED (0): + An unspecified data scan type. + DATA_PROFILE (1): + Data scan for data profile. + DATA_QUALITY (2): + Data scan for data quality. + DATA_DISCOVERY (4): + Data scan for data discovery. + """ + SCAN_TYPE_UNSPECIFIED = 0 + DATA_PROFILE = 1 + DATA_QUALITY = 2 + DATA_DISCOVERY = 4 + + class State(proto.Enum): + r"""The job state of the data scan. + + Values: + STATE_UNSPECIFIED (0): + Unspecified job state. + STARTED (1): + Data scan job started. + SUCCEEDED (2): + Data scan job successfully completed. + FAILED (3): + Data scan job was unsuccessful. + CANCELLED (4): + Data scan job was cancelled. + CREATED (5): + Data scan job was created. + """ + STATE_UNSPECIFIED = 0 + STARTED = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLED = 4 + CREATED = 5 + + class Trigger(proto.Enum): + r"""The trigger type for the data scan. + + Values: + TRIGGER_UNSPECIFIED (0): + An unspecified trigger type. + ON_DEMAND (1): + Data scan triggers on demand. + SCHEDULE (2): + Data scan triggers as per schedule. + """ + TRIGGER_UNSPECIFIED = 0 + ON_DEMAND = 1 + SCHEDULE = 2 + + class Scope(proto.Enum): + r"""The scope of job for the data scan. + + Values: + SCOPE_UNSPECIFIED (0): + An unspecified scope type. + FULL (1): + Data scan runs on all of the data. + INCREMENTAL (2): + Data scan runs on incremental data. + """ + SCOPE_UNSPECIFIED = 0 + FULL = 1 + INCREMENTAL = 2 + + class DataProfileResult(proto.Message): + r"""Data profile result for data scan job. + + Attributes: + row_count (int): + The count of rows processed in the data scan + job. + """ + + row_count: int = proto.Field( + proto.INT64, + number=1, + ) + + class DataQualityResult(proto.Message): + r"""Data quality result for data scan job. + + Attributes: + row_count (int): + The count of rows processed in the data scan + job. + passed (bool): + Whether the data quality result was ``pass`` or not. + dimension_passed (MutableMapping[str, bool]): + The result of each dimension for data quality result. The + key of the map is the name of the dimension. The value is + the bool value depicting whether the dimension result was + ``pass`` or not. 
score (float): + The table-level data quality score for the data scan job. + + The data quality score ranges between [0, 100] (up to two + decimal points). + dimension_score (MutableMapping[str, float]): + The score of each dimension for data quality result. The key + of the map is the name of the dimension. The value is the + data quality score for the dimension. + + The score ranges between [0, 100] (up to two decimal + points). + column_score (MutableMapping[str, float]): + The score of each column scanned in the data scan job. The + key of the map is the name of the column. The value is the + data quality score for the column. + + The score ranges between [0, 100] (up to two decimal + points). + """ + + row_count: int = proto.Field( + proto.INT64, + number=1, + ) + passed: bool = proto.Field( + proto.BOOL, + number=2, + ) + dimension_passed: MutableMapping[str, bool] = proto.MapField( + proto.STRING, + proto.BOOL, + number=3, + ) + score: float = proto.Field( + proto.FLOAT, + number=4, + ) + dimension_score: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.FLOAT, + number=5, + ) + column_score: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.FLOAT, + number=6, + ) + + class DataProfileAppliedConfigs(proto.Message): + r"""Applied configs for data profile type data scan job. + + Attributes: + sampling_percent (float): + The percentage of the records selected from the dataset for + DataScan. + + - Value ranges between 0.0 and 100.0. + - Value 0.0 or 100.0 implies that sampling was not applied. + row_filter_applied (bool): + Boolean indicating whether a row filter was + applied in the DataScan job. + column_filter_applied (bool): + Boolean indicating whether a column filter + was applied in the DataScan job. + """ + + sampling_percent: float = proto.Field( + proto.FLOAT, + number=1, + ) + row_filter_applied: bool = proto.Field( + proto.BOOL, + number=2, + ) + column_filter_applied: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class DataQualityAppliedConfigs(proto.Message): + r"""Applied configs for data quality type data scan job. + + Attributes: + sampling_percent (float): + The percentage of the records selected from the dataset for + DataScan. + + - Value ranges between 0.0 and 100.0. + - Value 0.0 or 100.0 implies that sampling was not applied. + row_filter_applied (bool): + Boolean indicating whether a row filter was + applied in the DataScan job. + """ + + sampling_percent: float = proto.Field( + proto.FLOAT, + number=1, + ) + row_filter_applied: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class PostScanActionsResult(proto.Message): + r"""Post scan actions result for data scan job. + + Attributes: + bigquery_export_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult): + The result of BigQuery export post scan + action. + """ + + class BigQueryExportResult(proto.Message): + r"""The result of BigQuery export post scan action. + + Attributes: + state (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult.State): + Execution state for the BigQuery exporting. + message (str): + Additional information about the BigQuery + exporting. + """ + class State(proto.Enum): + r"""Execution state for the exporting. + + Values: + STATE_UNSPECIFIED (0): + The exporting state is unspecified. + SUCCEEDED (1): + The exporting completed successfully. + FAILED (2): + The exporting is no longer running due to an + error.
SKIPPED (3): + The exporting is skipped due to no valid scan + result to export (usually because the scan + failed). + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + SKIPPED = 3 + + state: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( + proto.ENUM, + number=1, + enum='DataScanEvent.PostScanActionsResult.BigQueryExportResult.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + + bigquery_export_result: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult' = proto.Field( + proto.MESSAGE, + number=1, + message='DataScanEvent.PostScanActionsResult.BigQueryExportResult', + ) + + data_source: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + type_: ScanType = proto.Field( + proto.ENUM, + number=5, + enum=ScanType, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + message: str = proto.Field( + proto.STRING, + number=7, + ) + spec_version: str = proto.Field( + proto.STRING, + number=8, + ) + trigger: Trigger = proto.Field( + proto.ENUM, + number=9, + enum=Trigger, + ) + scope: Scope = proto.Field( + proto.ENUM, + number=10, + enum=Scope, + ) + data_profile: DataProfileResult = proto.Field( + proto.MESSAGE, + number=101, + oneof='result', + message=DataProfileResult, + ) + data_quality: DataQualityResult = proto.Field( + proto.MESSAGE, + number=102, + oneof='result', + message=DataQualityResult, + ) + data_profile_configs: DataProfileAppliedConfigs = proto.Field( + proto.MESSAGE, + number=201, + oneof='appliedConfigs', + message=DataProfileAppliedConfigs, + ) + data_quality_configs: DataQualityAppliedConfigs = proto.Field( + proto.MESSAGE, + number=202, + oneof='appliedConfigs', + message=DataQualityAppliedConfigs, + ) + post_scan_actions_result: PostScanActionsResult = proto.Field( + proto.MESSAGE, + number=11, + message=PostScanActionsResult, + ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = proto.Field( + proto.MESSAGE, + number=13, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + + +class DataQualityScanRuleResult(proto.Message): + r"""Information about the result of a data quality rule for data + quality scan. The monitored resource is 'DataScan'. + + Attributes: + job_id (str): + Identifier of the specific data scan job this + log entry is for. + data_source (str): + The data source of the data scan (e.g. + BigQuery table name). + column (str): + The column which this rule is evaluated + against. + rule_name (str): + The name of the data quality rule. + rule_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.RuleType): + The type of the data quality rule. + evalution_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.EvaluationType): + The evaluation type of the data quality rule. + rule_dimension (str): + The dimension of the data quality rule. + threshold_percent (float): + The passing threshold ([0.0, 100.0]) of the data quality + rule. + result (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.Result): + The result of the data quality rule.
+ evaluated_row_count (int): + The number of rows evaluated against the data quality rule. + This field is only valid for rules of PER_ROW evaluation + type. + passed_row_count (int): + The number of rows which passed a rule evaluation. This + field is only valid for rules of PER_ROW evaluation type. + null_row_count (int): + The number of rows with null values in the + specified column. + assertion_row_count (int): + The number of rows returned by the SQL + statement in a SQL assertion rule. This field is + only valid for SQL assertion rules. + """ + class RuleType(proto.Enum): + r"""The type of the data quality rule. + + Values: + RULE_TYPE_UNSPECIFIED (0): + An unspecified rule type. + NON_NULL_EXPECTATION (1): + See + [DataQualityRule.NonNullExpectation][google.cloud.dataplex.v1.DataQualityRule.NonNullExpectation]. + RANGE_EXPECTATION (2): + See + [DataQualityRule.RangeExpectation][google.cloud.dataplex.v1.DataQualityRule.RangeExpectation]. + REGEX_EXPECTATION (3): + See + [DataQualityRule.RegexExpectation][google.cloud.dataplex.v1.DataQualityRule.RegexExpectation]. + ROW_CONDITION_EXPECTATION (4): + See + [DataQualityRule.RowConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectation]. + SET_EXPECTATION (5): + See + [DataQualityRule.SetExpectation][google.cloud.dataplex.v1.DataQualityRule.SetExpectation]. + STATISTIC_RANGE_EXPECTATION (6): + See + [DataQualityRule.StatisticRangeExpectation][google.cloud.dataplex.v1.DataQualityRule.StatisticRangeExpectation]. + TABLE_CONDITION_EXPECTATION (7): + See + [DataQualityRule.TableConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.TableConditionExpectation]. + UNIQUENESS_EXPECTATION (8): + See + [DataQualityRule.UniquenessExpectation][google.cloud.dataplex.v1.DataQualityRule.UniquenessExpectation]. + SQL_ASSERTION (9): + See + [DataQualityRule.SqlAssertion][google.cloud.dataplex.v1.DataQualityRule.SqlAssertion]. + """ + RULE_TYPE_UNSPECIFIED = 0 + NON_NULL_EXPECTATION = 1 + RANGE_EXPECTATION = 2 + REGEX_EXPECTATION = 3 + ROW_CONDITION_EXPECTATION = 4 + SET_EXPECTATION = 5 + STATISTIC_RANGE_EXPECTATION = 6 + TABLE_CONDITION_EXPECTATION = 7 + UNIQUENESS_EXPECTATION = 8 + SQL_ASSERTION = 9 + + class EvaluationType(proto.Enum): + r"""The evaluation type of the data quality rule. + + Values: + EVALUATION_TYPE_UNSPECIFIED (0): + An unspecified evaluation type. + PER_ROW (1): + The rule evaluation is done at per row level. + AGGREGATE (2): + The rule evaluation is done for an aggregate + of rows. + """ + EVALUATION_TYPE_UNSPECIFIED = 0 + PER_ROW = 1 + AGGREGATE = 2 + + class Result(proto.Enum): + r"""Whether the data quality rule passed or failed. + + Values: + RESULT_UNSPECIFIED (0): + An unspecified result. + PASSED (1): + The data quality rule passed. + FAILED (2): + The data quality rule failed. 
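+
+        Illustrative check (a sketch; these payloads are normally
+        parsed from Cloud Logging entries rather than constructed by
+        hand, and the rule name below is a placeholder)::
+
+            from google.cloud import dataplex_v1
+
+            Result = dataplex_v1.DataQualityScanRuleResult.Result
+            rule_result = dataplex_v1.DataQualityScanRuleResult(
+                rule_name="non-null-check",
+                result=Result.FAILED,
+            )
+            if rule_result.result == Result.FAILED:
+                print("rule failed:", rule_result.rule_name)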
+ """ + RESULT_UNSPECIFIED = 0 + PASSED = 1 + FAILED = 2 + + job_id: str = proto.Field( + proto.STRING, + number=1, + ) + data_source: str = proto.Field( + proto.STRING, + number=2, + ) + column: str = proto.Field( + proto.STRING, + number=3, + ) + rule_name: str = proto.Field( + proto.STRING, + number=4, + ) + rule_type: RuleType = proto.Field( + proto.ENUM, + number=5, + enum=RuleType, + ) + evalution_type: EvaluationType = proto.Field( + proto.ENUM, + number=6, + enum=EvaluationType, + ) + rule_dimension: str = proto.Field( + proto.STRING, + number=7, + ) + threshold_percent: float = proto.Field( + proto.DOUBLE, + number=8, + ) + result: Result = proto.Field( + proto.ENUM, + number=9, + enum=Result, + ) + evaluated_row_count: int = proto.Field( + proto.INT64, + number=10, + ) + passed_row_count: int = proto.Field( + proto.INT64, + number=11, + ) + null_row_count: int = proto.Field( + proto.INT64, + number=12, + ) + assertion_row_count: int = proto.Field( + proto.INT64, + number=13, + ) + + +class BusinessGlossaryEvent(proto.Message): + r"""Payload associated with Business Glossary related log events. + + Attributes: + message (str): + The log message. + event_type (google.cloud.dataplex_v1.types.BusinessGlossaryEvent.EventType): + The type of the event. + resource (str): + Name of the resource. + """ + class EventType(proto.Enum): + r"""Type of glossary log event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + GLOSSARY_CREATE (1): + Glossary create event. + GLOSSARY_UPDATE (2): + Glossary update event. + GLOSSARY_DELETE (3): + Glossary delete event. + GLOSSARY_CATEGORY_CREATE (4): + Glossary category create event. + GLOSSARY_CATEGORY_UPDATE (5): + Glossary category update event. + GLOSSARY_CATEGORY_DELETE (6): + Glossary category delete event. + GLOSSARY_TERM_CREATE (7): + Glossary term create event. + GLOSSARY_TERM_UPDATE (8): + Glossary term update event. + GLOSSARY_TERM_DELETE (9): + Glossary term delete event. + """ + EVENT_TYPE_UNSPECIFIED = 0 + GLOSSARY_CREATE = 1 + GLOSSARY_UPDATE = 2 + GLOSSARY_DELETE = 3 + GLOSSARY_CATEGORY_CREATE = 4 + GLOSSARY_CATEGORY_UPDATE = 5 + GLOSSARY_CATEGORY_DELETE = 6 + GLOSSARY_TERM_CREATE = 7 + GLOSSARY_TERM_UPDATE = 8 + GLOSSARY_TERM_DELETE = 9 + + message: str = proto.Field( + proto.STRING, + number=1, + ) + event_type: EventType = proto.Field( + proto.ENUM, + number=2, + enum=EventType, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + + +class EntryLinkEvent(proto.Message): + r"""Payload associated with Entry related log events. + + Attributes: + message (str): + The log message. + event_type (google.cloud.dataplex_v1.types.EntryLinkEvent.EventType): + The type of the event. + resource (str): + Name of the resource. + """ + class EventType(proto.Enum): + r"""Type of entry link log event. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + An unspecified event type. + ENTRY_LINK_CREATE (1): + EntryLink create event. + ENTRY_LINK_DELETE (2): + EntryLink delete event. 
+ """ + EVENT_TYPE_UNSPECIFIED = 0 + ENTRY_LINK_CREATE = 1 + ENTRY_LINK_DELETE = 2 + + message: str = proto.Field( + proto.STRING, + number=1, + ) + event_type: EventType = proto.Field( + proto.ENUM, + number=2, + enum=EventType, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py new file mode 100644 index 000000000000..481b4e41a9d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py @@ -0,0 +1,1182 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'StorageSystem', + 'CreateEntityRequest', + 'UpdateEntityRequest', + 'DeleteEntityRequest', + 'ListEntitiesRequest', + 'ListEntitiesResponse', + 'GetEntityRequest', + 'ListPartitionsRequest', + 'CreatePartitionRequest', + 'DeletePartitionRequest', + 'ListPartitionsResponse', + 'GetPartitionRequest', + 'Entity', + 'Partition', + 'Schema', + 'StorageFormat', + 'StorageAccess', + }, +) + + +class StorageSystem(proto.Enum): + r"""Identifies the cloud system that manages the data storage. + + Values: + STORAGE_SYSTEM_UNSPECIFIED (0): + Storage system unspecified. + CLOUD_STORAGE (1): + The entity data is contained within a Cloud + Storage bucket. + BIGQUERY (2): + The entity data is contained within a + BigQuery dataset. + """ + STORAGE_SYSTEM_UNSPECIFIED = 0 + CLOUD_STORAGE = 1 + BIGQUERY = 2 + + +class CreateEntityRequest(proto.Message): + r"""Create a metadata entity request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entity: 'Entity' = proto.Field( + proto.MESSAGE, + number=3, + message='Entity', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEntityRequest(proto.Message): + r"""Update a metadata entity request. + The exiting entity will be fully replaced by the entity in the + request. The entity ID is mutable. To modify the ID, use the + current entity ID in the request URL and specify the new ID in + the request body. + + Attributes: + entity (google.cloud.dataplex_v1.types.Entity): + Required. Update description. + validate_only (bool): + Optional. 
Only validate the request, but do + not perform mutations. The default is false. + """ + + entity: 'Entity' = proto.Field( + proto.MESSAGE, + number=2, + message='Entity', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEntityRequest(proto.Message): + r"""Delete a metadata entity request. + + Attributes: + name (str): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + etag (str): + Required. The etag associated with the entity, which can be + retrieved with a [GetEntity][] request. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntitiesRequest(proto.Message): + r"""List metadata entities request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + view (google.cloud.dataplex_v1.types.ListEntitiesRequest.EntityView): + Required. Specify the entity view to make a + partial list request. + page_size (int): + Optional. Maximum number of entities to + return. The service may return fewer than this + value. If unspecified, 100 entities will be + returned by default. The maximum value is 500; + larger values will be truncated to 500. + page_token (str): + Optional. Page token received from a previous + ``ListEntities`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListEntities`` must match the call that + provided the page token. + filter (str): + Optional. The following filter parameters can be added to + the URL to limit the entities returned by the API: + + - Entity ID: ?filter="id=entityID" + - Asset ID: ?filter="asset=assetID" + - Data path: ?filter="data_path=gs://my-bucket" + - Is HIVE compatible: ?filter="hive_compatible=true" + - Is BigQuery compatible: ?filter="bigquery_compatible=true". + """ + class EntityView(proto.Enum): + r"""Entity views. + + Values: + ENTITY_VIEW_UNSPECIFIED (0): + The default unset value. Return both table + and fileset entities if unspecified. + TABLES (1): + Only list table entities. + FILESETS (2): + Only list fileset entities. + """ + ENTITY_VIEW_UNSPECIFIED = 0 + TABLES = 1 + FILESETS = 2 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + view: EntityView = proto.Field( + proto.ENUM, + number=2, + enum=EntityView, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEntitiesResponse(proto.Message): + r"""List metadata entities response. + + Attributes: + entities (MutableSequence[google.cloud.dataplex_v1.types.Entity]): + Entities in the specified parent zone. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no remaining results in + the list. + """ + + @property + def raw_page(self): + return self + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEntityRequest(proto.Message): + r"""Get metadata entity request. + + Attributes: + name (str): + Required.
The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + view (google.cloud.dataplex_v1.types.GetEntityRequest.EntityView): + Optional. Used to select the subset of entity information to + return. Defaults to ``BASIC``. + """ + class EntityView(proto.Enum): + r"""Entity views for get entity partial result. + + Values: + ENTITY_VIEW_UNSPECIFIED (0): + The API will default to the ``BASIC`` view. + BASIC (1): + Minimal view that does not include the + schema. + SCHEMA (2): + Include basic information and schema. + FULL (4): + Include everything. Currently, this is the + same as the SCHEMA view. + """ + ENTITY_VIEW_UNSPECIFIED = 0 + BASIC = 1 + SCHEMA = 2 + FULL = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: EntityView = proto.Field( + proto.ENUM, + number=2, + enum=EntityView, + ) + + +class ListPartitionsRequest(proto.Message): + r"""List metadata partitions request. + + Attributes: + parent (str): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + page_size (int): + Optional. Maximum number of partitions to + return. The service may return fewer than this + value. If unspecified, 100 partitions will be + returned by default. The maximum page size is + 500; larger values will be truncated to 500. + page_token (str): + Optional. Page token received from a previous + ``ListPartitions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListPartitions`` must match the call that + provided the page token. + filter (str): + Optional. Filter the partitions returned to the caller using + a key value pair expression. Supported operators and syntax: + + - logic operators: AND, OR + - comparison operators: <, >, >=, <=, =, != + - LIKE operators: + + - The right hand side of a LIKE operator supports "." and + "*" for wildcard searches, for example + ``value1 LIKE ".*oo.*"`` + + - parenthetical grouping: ( ) + + Sample filter expression: ``?filter="key1 < value1 OR key2 > + value2"`` + + **Notes:** + + - Keys to the left of operators are case insensitive. + - Partition results are sorted first by creation time, then + by lexicographic order. + - Up to 20 key value filter pairs are allowed, but due to + performance considerations, only the first 10 will be used + as a filter. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CreatePartitionRequest(proto.Message): + r"""Create metadata partition request. + + Attributes: + parent (str): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + partition (google.cloud.dataplex_v1.types.Partition): + Required. Partition resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + partition: 'Partition' = proto.Field( + proto.MESSAGE, + number=3, + message='Partition', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeletePartitionRequest(proto.Message): + r"""Delete metadata partition request.
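A short usage sketch for the ListPartitionsRequest defined above, assuming the generated dataplex_v1.MetadataServiceClient surface; the resource names are placeholders, and the filter string mirrors the sample expression in the docstring. The returned pager follows next_page_token automatically:

from google.cloud import dataplex_v1

client = dataplex_v1.MetadataServiceClient()
request = dataplex_v1.ListPartitionsRequest(
    parent=(
        "projects/my-project/locations/us-central1/lakes/my-lake/"
        "zones/my-zone/entities/my-entity"
    ),
    page_size=100,
    filter="key1 < value1 OR key2 > value2",
)
for partition in client.list_partitions(request=request):
    print(partition.name)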
+ + Attributes: + name (str): + Required. The resource name of the partition. Format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. + etag (str): + Optional. The etag associated with the + partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListPartitionsResponse(proto.Message): + r"""List metadata partitions response. + + Attributes: + partitions (MutableSequence[google.cloud.dataplex_v1.types.Partition]): + Partitions under the specified parent entity. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no remaining results in + the list. + """ + + @property + def raw_page(self): + return self + + partitions: MutableSequence['Partition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Partition', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPartitionRequest(proto.Message): + r"""Get metadata partition request. + + Attributes: + name (str): + Required. The resource name of the partition: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Entity(proto.Message): + r"""Represents tables and fileset metadata contained within a + zone. + + Attributes: + name (str): + Output only. The resource name of the entity, of the form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{id}``. + display_name (str): + Optional. Display name must be shorter than + or equal to 256 characters. + description (str): + Optional. User friendly longer description + text. Must be shorter than or equal to 1024 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entity was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the entity was + last updated. + id (str): + Required. A user-provided entity ID. It is + mutable, and will be used as the published table + name. Specifying a new ID in an update entity + request will override the existing value. + The ID must contain only letters (a-z, A-Z), + numbers (0-9), and underscores, and consist of + 256 or fewer characters. + etag (str): + Optional. The etag associated with the entity, which can be + retrieved with a [GetEntity][] request. Required for update + and delete requests. + type_ (google.cloud.dataplex_v1.types.Entity.Type): + Required. Immutable. The type of entity. + asset (str): + Required. Immutable. The ID of the asset + associated with the storage location containing + the entity data. The entity must be within the + same zone as the asset. + data_path (str): + Required. Immutable. The storage path of the entity data. + For Cloud Storage data, this is the fully-qualified path to + the entity, such as ``gs://bucket/path/to/data``. For + BigQuery data, this is the name of the table resource, such + as + ``projects/project_id/datasets/dataset_id/tables/table_id``.
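The {partition_value_path} convention above (an ordered sequence of values joined by "/", with each value URL-encoded twice per the Partition.name documentation later in this file) can be sketched with the standard library alone; the helper name and the sample values are illustrative:

from urllib.parse import quote

def partition_value_path(values: list[str]) -> str:
    # Each partition value is URL-encoded twice; the "/" separators
    # between values stay literal path separators.
    return "/".join(quote(quote(v, safe=""), safe="") for v in values)

# ["US:CA", "CA#Sunnyvale"] -> "US%253ACA/CA%2523Sunnyvale"
print(partition_value_path(["US:CA", "CA#Sunnyvale"]))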
+ data_path_pattern (str): + Optional. The set of items within the data path constituting + the data in the entity, represented as a glob path. Example: + ``gs://bucket/path/to/data/**/*.csv``. + catalog_entry (str): + Output only. The name of the associated Data + Catalog entry. + system (google.cloud.dataplex_v1.types.StorageSystem): + Required. Immutable. Identifies the storage + system of the entity data. + format_ (google.cloud.dataplex_v1.types.StorageFormat): + Required. Identifies the storage format of + the entity data. It does not apply to entities + with data stored in BigQuery. + compatibility (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus): + Output only. Metadata stores that the entity + is compatible with. + access (google.cloud.dataplex_v1.types.StorageAccess): + Output only. Identifies the access mechanism + to the entity. Not user settable. + uid (str): + Output only. System generated unique ID for + the Entity. This ID will be different if the + Entity is deleted and re-created with the same + name. + schema (google.cloud.dataplex_v1.types.Schema): + Required. The description of the data structure and layout. + The schema is not included in list responses. It is only + included in ``SCHEMA`` and ``FULL`` entity views of a + ``GetEntity`` response. + """ + class Type(proto.Enum): + r"""The type of entity. + + Values: + TYPE_UNSPECIFIED (0): + Type unspecified. + TABLE (1): + Structured and semi-structured data. + FILESET (2): + Unstructured data. + """ + TYPE_UNSPECIFIED = 0 + TABLE = 1 + FILESET = 2 + + class CompatibilityStatus(proto.Message): + r"""Provides compatibility information for various metadata + stores. + + Attributes: + hive_metastore (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility): + Output only. Whether this entity is + compatible with Hive Metastore. + bigquery (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility): + Output only. Whether this entity is + compatible with BigQuery. + """ + + class Compatibility(proto.Message): + r"""Provides compatibility information for a specific metadata + store. + + Attributes: + compatible (bool): + Output only. Whether the entity is compatible + and can be represented in the metadata store. + reason (str): + Output only. Provides additional detail if + the entity is incompatible with the metadata + store. 
+ """ + + compatible: bool = proto.Field( + proto.BOOL, + number=1, + ) + reason: str = proto.Field( + proto.STRING, + number=2, + ) + + hive_metastore: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( + proto.MESSAGE, + number=1, + message='Entity.CompatibilityStatus.Compatibility', + ) + bigquery: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( + proto.MESSAGE, + number=2, + message='Entity.CompatibilityStatus.Compatibility', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + id: str = proto.Field( + proto.STRING, + number=7, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + type_: Type = proto.Field( + proto.ENUM, + number=10, + enum=Type, + ) + asset: str = proto.Field( + proto.STRING, + number=11, + ) + data_path: str = proto.Field( + proto.STRING, + number=12, + ) + data_path_pattern: str = proto.Field( + proto.STRING, + number=13, + ) + catalog_entry: str = proto.Field( + proto.STRING, + number=14, + ) + system: 'StorageSystem' = proto.Field( + proto.ENUM, + number=15, + enum='StorageSystem', + ) + format_: 'StorageFormat' = proto.Field( + proto.MESSAGE, + number=16, + message='StorageFormat', + ) + compatibility: CompatibilityStatus = proto.Field( + proto.MESSAGE, + number=19, + message=CompatibilityStatus, + ) + access: 'StorageAccess' = proto.Field( + proto.MESSAGE, + number=21, + message='StorageAccess', + ) + uid: str = proto.Field( + proto.STRING, + number=22, + ) + schema: 'Schema' = proto.Field( + proto.MESSAGE, + number=50, + message='Schema', + ) + + +class Partition(proto.Message): + r"""Represents partition metadata contained within entity + instances. + + Attributes: + name (str): + Output only. Partition values used in the HTTP URL must be + double encoded. For example, + ``url_encode(url_encode(value))`` can be used to encode + "US:CA/CA#Sunnyvale so that the request URL ends with + "/partitions/US%253ACA/CA%2523Sunnyvale". The name field in + the response retains the encoded format. + values (MutableSequence[str]): + Required. Immutable. The set of values + representing the partition, which correspond to + the partition schema defined in the parent + entity. + location (str): + Required. Immutable. The location of the entity data within + the partition, for example, + ``gs://bucket/path/to/entity/key1=value1/key2=value2``. Or + ``projects//datasets//tables/`` + etag (str): + Optional. The etag for this partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Schema(proto.Message): + r"""Schema information describing the structure and layout of the + data. + + Attributes: + user_managed (bool): + Required. Set to ``true`` if user-managed or ``false`` if + managed by Dataplex Universal Catalog. The default is + ``false`` (managed by Dataplex Universal Catalog). + + - Set to ``false``\ to enable Dataplex Universal Catalog + discovery to update the schema. 
including new data + discovery, schema inference, and schema evolution. Users + retain the ability to input and edit the schema. Dataplex + Universal Catalog treats schema input by the user as + though produced by a previous Dataplex Universal Catalog + discovery operation, and it will evolve the schema and + take action based on that treatment. + + - Set to ``true`` to fully manage the entity schema. This + setting guarantees that Dataplex Universal Catalog will + not change schema fields. + fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): + Optional. The sequence of fields describing data in table + entities. **Note:** BigQuery SchemaFields are immutable. + partition_fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.PartitionField]): + Optional. The sequence of fields describing + the partition structure in entities. If this + field is empty, there are no partitions within + the data. + partition_style (google.cloud.dataplex_v1.types.Schema.PartitionStyle): + Optional. The structure of paths containing + partition data within the entity. + """ + class Type(proto.Enum): + r"""Type information for fields in schemas and partition schemas. + + Values: + TYPE_UNSPECIFIED (0): + SchemaType unspecified. + BOOLEAN (1): + Boolean field. + BYTE (2): + Single byte numeric field. + INT16 (3): + 16-bit numeric field. + INT32 (4): + 32-bit numeric field. + INT64 (5): + 64-bit numeric field. + FLOAT (6): + Floating point numeric field. + DOUBLE (7): + Double precision numeric field. + DECIMAL (8): + Real value numeric field. + STRING (9): + Sequence of characters field. + BINARY (10): + Sequence of bytes field. + TIMESTAMP (11): + Date and time field. + DATE (12): + Date field. + TIME (13): + Time field. + RECORD (14): + Structured field. Nested fields that define + the structure of the map. If all nested fields + are nullable, this field represents a union. + NULL (100): + Null field that does not have values. + """ + TYPE_UNSPECIFIED = 0 + BOOLEAN = 1 + BYTE = 2 + INT16 = 3 + INT32 = 4 + INT64 = 5 + FLOAT = 6 + DOUBLE = 7 + DECIMAL = 8 + STRING = 9 + BINARY = 10 + TIMESTAMP = 11 + DATE = 12 + TIME = 13 + RECORD = 14 + NULL = 100 + + class Mode(proto.Enum): + r"""Additional qualifiers to define field semantics. + + Values: + MODE_UNSPECIFIED (0): + Mode unspecified. + REQUIRED (1): + The field has required semantics. + NULLABLE (2): + The field has optional semantics, and may be + null. + REPEATED (3): + The field has repeated (0 or more) semantics, + and is a list of values. + """ + MODE_UNSPECIFIED = 0 + REQUIRED = 1 + NULLABLE = 2 + REPEATED = 3 + + class PartitionStyle(proto.Enum): + r"""The structure of paths within the entity, which represent + partitions. + + Values: + PARTITION_STYLE_UNSPECIFIED (0): + PartitionStyle unspecified + HIVE_COMPATIBLE (1): + Partitions are hive-compatible. Examples: + ``gs://bucket/path/to/table/dt=2019-10-31/lang=en``, + ``gs://bucket/path/to/table/dt=2019-10-31/lang=en/late``. + """ + PARTITION_STYLE_UNSPECIFIED = 0 + HIVE_COMPATIBLE = 1 + + class SchemaField(proto.Message): + r"""Represents a column field within a table schema. + + Attributes: + name (str): + Required. The name of the field. Must contain + only letters, numbers and underscores, with a + maximum length of 767 characters, and must begin + with a letter or underscore. + description (str): + Optional. User friendly field description. + Must be less than or equal to 1024 characters. + type_ (google.cloud.dataplex_v1.types.Schema.Type): + Required. 
The type of field. + mode (google.cloud.dataplex_v1.types.Schema.Mode): + Required. Additional field semantics. + fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): + Optional. Any nested field for complex types. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + type_: 'Schema.Type' = proto.Field( + proto.ENUM, + number=3, + enum='Schema.Type', + ) + mode: 'Schema.Mode' = proto.Field( + proto.ENUM, + number=4, + enum='Schema.Mode', + ) + fields: MutableSequence['Schema.SchemaField'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='Schema.SchemaField', + ) + + class PartitionField(proto.Message): + r"""Represents a key field within the entity's partition structure. You + can have up to 20 partition fields, but only the first 10 are used + for filtering, due to performance considerations. **Note:** + Partition fields are immutable. + + Attributes: + name (str): + Required. Partition field name must consist + of letters, numbers, and underscores only, with + a maximum length of 256 characters, and must + begin with a letter or underscore. + type_ (google.cloud.dataplex_v1.types.Schema.Type): + Required. Immutable. The type of field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'Schema.Type' = proto.Field( + proto.ENUM, + number=2, + enum='Schema.Type', + ) + + user_managed: bool = proto.Field( + proto.BOOL, + number=1, + ) + fields: MutableSequence[SchemaField] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=SchemaField, + ) + partition_fields: MutableSequence[PartitionField] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=PartitionField, + ) + partition_style: PartitionStyle = proto.Field( + proto.ENUM, + number=4, + enum=PartitionStyle, + ) + + +class StorageFormat(proto.Message): + r"""Describes the format of the data within its storage location. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + format_ (google.cloud.dataplex_v1.types.StorageFormat.Format): + Output only. The data format associated with + the stored data, which represents content type + values. The value is inferred from mime type. + compression_format (google.cloud.dataplex_v1.types.StorageFormat.CompressionFormat): + Optional. The compression type associated + with the stored data. If unspecified, the data + is uncompressed. + mime_type (str): + Required. The mime type descriptor for the + data. Must match the pattern {type}/{subtype}. + Supported values: + + - application/x-parquet + - application/x-avro + - application/x-orc + - application/x-tfrecord + - application/x-parquet+iceberg + - application/x-avro+iceberg + - application/x-orc+iceberg + - application/json + - application/{subtypes} + - text/csv + - text/ + - image/{image subtype} + - video/{video subtype} + - audio/{audio subtype} + csv (google.cloud.dataplex_v1.types.StorageFormat.CsvOptions): + Optional. Additional information about CSV + formatted data. + + This field is a member of `oneof`_ ``options``. + json (google.cloud.dataplex_v1.types.StorageFormat.JsonOptions): + Optional. Additional information about JSON + formatted data.
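A hedged construction sketch for the Schema message defined above; the field and partition names are placeholders, and user_managed=True takes the branch of the docstring where discovery does not evolve the schema:

from google.cloud import dataplex_v1

schema = dataplex_v1.Schema(
    user_managed=True,  # Dataplex discovery will not change these fields.
    fields=[
        dataplex_v1.Schema.SchemaField(
            name="order_id",
            type_=dataplex_v1.Schema.Type.STRING,
            mode=dataplex_v1.Schema.Mode.REQUIRED,
        ),
    ],
    partition_fields=[
        dataplex_v1.Schema.PartitionField(
            name="dt",
            type_=dataplex_v1.Schema.Type.DATE,
        ),
    ],
    partition_style=dataplex_v1.Schema.PartitionStyle.HIVE_COMPATIBLE,
)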
+ + This field is a member of `oneof`_ ``options``. + iceberg (google.cloud.dataplex_v1.types.StorageFormat.IcebergOptions): + Optional. Additional information about + iceberg tables. + + This field is a member of `oneof`_ ``options``. + """ + class Format(proto.Enum): + r"""The specific file format of the data. + + Values: + FORMAT_UNSPECIFIED (0): + Format unspecified. + PARQUET (1): + Parquet-formatted structured data. + AVRO (2): + Avro-formatted structured data. + ORC (3): + Orc-formatted structured data. + CSV (100): + Csv-formatted semi-structured data. + JSON (101): + Json-formatted semi-structured data. + IMAGE (200): + Image data formats (such as jpg and png). + AUDIO (201): + Audio data formats (such as mp3, and wav). + VIDEO (202): + Video data formats (such as mp4 and mpg). + TEXT (203): + Textual data formats (such as txt and xml). + TFRECORD (204): + TensorFlow record format. + OTHER (1000): + Data that doesn't match a specific format. + UNKNOWN (1001): + Data of an unknown format. + """ + FORMAT_UNSPECIFIED = 0 + PARQUET = 1 + AVRO = 2 + ORC = 3 + CSV = 100 + JSON = 101 + IMAGE = 200 + AUDIO = 201 + VIDEO = 202 + TEXT = 203 + TFRECORD = 204 + OTHER = 1000 + UNKNOWN = 1001 + + class CompressionFormat(proto.Enum): + r"""The specific compressed file format of the data. + + Values: + COMPRESSION_FORMAT_UNSPECIFIED (0): + CompressionFormat unspecified. Implies + uncompressed data. + GZIP (2): + GZip compressed set of files. + BZIP2 (3): + BZip2 compressed set of files. + """ + COMPRESSION_FORMAT_UNSPECIFIED = 0 + GZIP = 2 + BZIP2 = 3 + + class CsvOptions(proto.Message): + r"""Describes CSV and similar semi-structured data formats. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + Accepts "US-ASCII", "UTF-8", and "ISO-8859-1". + Defaults to UTF-8 if unspecified. + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. Defaults to 0. + delimiter (str): + Optional. The delimiter used to separate + values. Defaults to ','. + quote (str): + Optional. The character used to quote column + values. Accepts '"' (double quotation mark) or + ''' (single quotation mark). Defaults to '"' + (double quotation mark) if unspecified. + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + header_rows: int = proto.Field( + proto.INT32, + number=2, + ) + delimiter: str = proto.Field( + proto.STRING, + number=3, + ) + quote: str = proto.Field( + proto.STRING, + number=4, + ) + + class JsonOptions(proto.Message): + r"""Describes JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + Accepts "US-ASCII", "UTF-8" and "ISO-8859-1". + Defaults to UTF-8 if not specified. + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + + class IcebergOptions(proto.Message): + r"""Describes Iceberg data format. + + Attributes: + metadata_location (str): + Optional. 
The location where the Iceberg + metadata is present; it must be within the + table path. + """ + + metadata_location: str = proto.Field( + proto.STRING, + number=1, + ) + + format_: Format = proto.Field( + proto.ENUM, + number=1, + enum=Format, + ) + compression_format: CompressionFormat = proto.Field( + proto.ENUM, + number=2, + enum=CompressionFormat, + ) + mime_type: str = proto.Field( + proto.STRING, + number=3, + ) + csv: CsvOptions = proto.Field( + proto.MESSAGE, + number=10, + oneof='options', + message=CsvOptions, + ) + json: JsonOptions = proto.Field( + proto.MESSAGE, + number=11, + oneof='options', + message=JsonOptions, + ) + iceberg: IcebergOptions = proto.Field( + proto.MESSAGE, + number=12, + oneof='options', + message=IcebergOptions, + ) + + +class StorageAccess(proto.Message): + r"""Describes the access mechanism of the data within its storage + location. + + Attributes: + read (google.cloud.dataplex_v1.types.StorageAccess.AccessMode): + Output only. Describes the read access + mechanism of the data. Not user settable. + """ + class AccessMode(proto.Enum): + r"""Access Mode determines how data stored within the Entity is + read. + + Values: + ACCESS_MODE_UNSPECIFIED (0): + Access mode unspecified. + DIRECT (1): + Default. Data is accessed directly using + storage APIs. + MANAGED (2): + Data is accessed through a managed interface + using BigQuery APIs. + """ + ACCESS_MODE_UNSPECIFIED = 0 + DIRECT = 1 + MANAGED = 2 + + read: AccessMode = proto.Field( + proto.ENUM, + number=21, + enum=AccessMode, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py new file mode 100644 index 000000000000..0a6a6d6e6b8b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Trigger', + 'DataSource', + 'ScannedData', + }, +) + + +class Trigger(proto.Message): + r"""DataScan scheduling and trigger settings. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_demand (google.cloud.dataplex_v1.types.Trigger.OnDemand): + The scan runs once via ``RunDataScan`` API. + + This field is a member of `oneof`_ ``mode``. + schedule (google.cloud.dataplex_v1.types.Trigger.Schedule): + The scan is scheduled to run periodically.
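A minimal sketch of populating the StorageFormat message defined above for gzipped CSV data; the values are illustrative, and format_ is left unset because it is output only and inferred from mime_type:

from google.cloud import dataplex_v1

fmt = dataplex_v1.StorageFormat(
    mime_type="text/csv",
    compression_format=dataplex_v1.StorageFormat.CompressionFormat.GZIP,
    csv=dataplex_v1.StorageFormat.CsvOptions(
        encoding="UTF-8",
        header_rows=1,  # skip one header row when reading data rows
        delimiter=",",
    ),
)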
+ + This field is a member of `oneof`_ ``mode``. + """ + + class OnDemand(proto.Message): + r"""The scan runs once via ``RunDataScan`` API. + """ + + class Schedule(proto.Message): + r"""The scan is scheduled to run periodically. + + Attributes: + cron (str): + Required. `Cron <https://en.wikipedia.org/wiki/Cron>`__ + schedule for running scans periodically. + + To explicitly set a timezone in the cron tab, apply a prefix + in the cron tab: **"CRON_TZ=${IANA_TIME_ZONE}"** or + **"TZ=${IANA_TIME_ZONE}"**. The **${IANA_TIME_ZONE}** may + only be a valid string from IANA time zone database + (`wikipedia <https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>`__). + For example, ``CRON_TZ=America/New_York 1 * * * *``, or + ``TZ=America/New_York 1 * * * *``. + + This field is required for Schedule scans. + """ + + cron: str = proto.Field( + proto.STRING, + number=1, + ) + + on_demand: OnDemand = proto.Field( + proto.MESSAGE, + number=100, + oneof='mode', + message=OnDemand, + ) + schedule: Schedule = proto.Field( + proto.MESSAGE, + number=101, + oneof='mode', + message=Schedule, + ) + + +class DataSource(proto.Message): + r"""The data source for DataScan. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + entity (str): + Immutable. The Dataplex Universal Catalog entity that + represents the data source (e.g. BigQuery table) for + DataScan, of the form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This field is a member of `oneof`_ ``source``. + resource (str): + Immutable. The service-qualified full resource name of the + cloud resource for a DataScan job to scan against. The field + could either be: Cloud Storage bucket for DataDiscoveryScan + Format: + //storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID + or BigQuery table of type "TABLE" for + DataProfileScan/DataQualityScan Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + + This field is a member of `oneof`_ ``source``. + """ + + entity: str = proto.Field( + proto.STRING, + number=100, + oneof='source', + ) + resource: str = proto.Field( + proto.STRING, + number=101, + oneof='source', + ) + + +class ScannedData(proto.Message): + r"""The data scanned during processing (e.g. in incremental + DataScan) + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + incremental_field (google.cloud.dataplex_v1.types.ScannedData.IncrementalField): + The range denoted by values of an incremental + field + + This field is a member of `oneof`_ ``data_range``. + """ + + class IncrementalField(proto.Message): + r"""A data range denoted by a pair of start/end values of a + field. + + Attributes: + field (str): + Output only. The field that contains values + which monotonically increase over time (e.g. a + timestamp column). + start (str): + Output only. Value that marks the start of + the range. + end (str): + Output only. Value that marks the end of the + range.
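A hedged sketch of the Trigger oneof defined above; the cron line mirrors the example in the docstring, and assigning one member of the ``mode`` oneof clears the other:

from google.cloud import dataplex_v1

trigger = dataplex_v1.Trigger(
    schedule=dataplex_v1.Trigger.Schedule(
        cron="CRON_TZ=America/New_York 1 * * * *",
    ),
)

# Switching to on-demand clears the schedule member of the oneof.
trigger.on_demand = dataplex_v1.Trigger.OnDemand()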
+ """ + + field: str = proto.Field( + proto.STRING, + number=1, + ) + start: str = proto.Field( + proto.STRING, + number=2, + ) + end: str = proto.Field( + proto.STRING, + number=3, + ) + + incremental_field: IncrementalField = proto.Field( + proto.MESSAGE, + number=1, + oneof='data_range', + message=IncrementalField, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py new file mode 100644 index 000000000000..ad2981fa94d6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py @@ -0,0 +1,1436 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'State', + 'Lake', + 'AssetStatus', + 'Zone', + 'Action', + 'Asset', + }, +) + + +class State(proto.Enum): + r"""State of a resource. + + Values: + STATE_UNSPECIFIED (0): + State is not specified. + ACTIVE (1): + Resource is active, i.e., ready to use. + CREATING (2): + Resource is under creation. + DELETING (3): + Resource is under deletion. + ACTION_REQUIRED (4): + Resource is active but has unresolved + actions. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + ACTION_REQUIRED = 4 + + +class Lake(proto.Message): + r"""A lake is a centralized repository for managing enterprise + data across the organization distributed across many cloud + projects, and stored in a variety of storage services such as + Google Cloud Storage and BigQuery. The resources attached to a + lake are referred to as managed resources. Data within these + managed resources can be structured or unstructured. A lake + provides data admins with tools to organize, secure and manage + their data at scale, and provides data scientists and data + engineers an integrated experience to easily search, discover, + analyze and transform data and associated metadata. + + Attributes: + name (str): + Output only. The relative resource name of the lake, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the lake. This ID will be different if + the lake is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the lake was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the lake was last + updated. + labels (MutableMapping[str, str]): + Optional. 
User-defined labels for the lake. + description (str): + Optional. Description of the lake. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the lake. + service_account (str): + Output only. Service account associated with + this lake. This service account must be + authorized to access or operate on resources + managed by the lake. + metastore (google.cloud.dataplex_v1.types.Lake.Metastore): + Optional. Settings to manage lake and + Dataproc Metastore service instance association. + asset_status (google.cloud.dataplex_v1.types.AssetStatus): + Output only. Aggregated status of the + underlying assets of the lake. + metastore_status (google.cloud.dataplex_v1.types.Lake.MetastoreStatus): + Output only. Metastore status of the lake. + """ + + class Metastore(proto.Message): + r"""Settings to manage association of Dataproc Metastore with a + lake. + + Attributes: + service (str): + Optional. A relative reference to the Dataproc Metastore + (https://cloud.google.com/dataproc-metastore/docs) service + associated with the lake: + ``projects/{project_id}/locations/{location_id}/services/{service_id}`` + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + + class MetastoreStatus(proto.Message): + r"""Status of Lake and Dataproc Metastore service instance + association. + + Attributes: + state (google.cloud.dataplex_v1.types.Lake.MetastoreStatus.State): + Current state of association. + message (str): + Additional information about the current + status. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the metastore status of + the lake. + endpoint (str): + The URI of the endpoint used to access the + Metastore service. + """ + class State(proto.Enum): + r"""Current state of association. + + Values: + STATE_UNSPECIFIED (0): + Unspecified. + NONE (1): + A Metastore service instance is not + associated with the lake. + READY (2): + A Metastore service instance is attached to + the lake. + UPDATING (3): + Attach/detach is in progress. + ERROR (4): + Attach/detach could not be done due to + errors. 
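A small inspection sketch for the metastore association described above, assuming the lake CRUD surface lives on the generated dataplex_v1.DataplexServiceClient; the lake name is a placeholder:

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
lake = client.get_lake(
    name="projects/my-project/locations/us-central1/lakes/my-lake"
)

status = lake.metastore_status
if status.state == dataplex_v1.Lake.MetastoreStatus.State.READY:
    print(f"Metastore attached at: {status.endpoint}")
else:
    print(f"Metastore not ready: {status.message}")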
+ """ + STATE_UNSPECIFIED = 0 + NONE = 1 + READY = 2 + UPDATING = 3 + ERROR = 4 + + state: 'Lake.MetastoreStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Lake.MetastoreStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + endpoint: str = proto.Field( + proto.STRING, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=8, + enum='State', + ) + service_account: str = proto.Field( + proto.STRING, + number=9, + ) + metastore: Metastore = proto.Field( + proto.MESSAGE, + number=102, + message=Metastore, + ) + asset_status: 'AssetStatus' = proto.Field( + proto.MESSAGE, + number=103, + message='AssetStatus', + ) + metastore_status: MetastoreStatus = proto.Field( + proto.MESSAGE, + number=104, + message=MetastoreStatus, + ) + + +class AssetStatus(proto.Message): + r"""Aggregated status of the underlying assets of a lake or zone. + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + active_assets (int): + Number of active assets. + security_policy_applying_assets (int): + Number of assets that are in process of + updating the security policy on attached + resources. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + active_assets: int = proto.Field( + proto.INT32, + number=2, + ) + security_policy_applying_assets: int = proto.Field( + proto.INT32, + number=3, + ) + + +class Zone(proto.Message): + r"""A zone represents a logical group of related assets within a + lake. A zone can be used to map to organizational structure or + represent stages of data readiness from raw to curated. It + provides managing behavior that is shared or inherited by all + contained assets. + + Attributes: + name (str): + Output only. The relative resource name of the zone, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the zone. This ID will be different if + the zone is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the zone was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the zone was last + updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the zone. + description (str): + Optional. Description of the zone. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the zone. + type_ (google.cloud.dataplex_v1.types.Zone.Type): + Required. Immutable. The type of the zone. + discovery_spec (google.cloud.dataplex_v1.types.Zone.DiscoverySpec): + Optional. 
Specification of the discovery + feature applied to data in this zone. + resource_spec (google.cloud.dataplex_v1.types.Zone.ResourceSpec): + Required. Specification of the resources that + are referenced by the assets within this zone. + asset_status (google.cloud.dataplex_v1.types.AssetStatus): + Output only. Aggregated status of the + underlying assets of the zone. + """ + class Type(proto.Enum): + r"""Type of zone. + + Values: + TYPE_UNSPECIFIED (0): + Zone type not specified. + RAW (1): + A zone that contains data that needs further + processing before it is considered generally + ready for consumption and analytics workloads. + CURATED (2): + A zone that contains data that is considered + to be ready for broader consumption and + analytics workloads. Curated structured data + stored in Cloud Storage must conform to certain + file formats (parquet, avro and orc) and be + organized in a hive-compatible directory layout. + """ + TYPE_UNSPECIFIED = 0 + RAW = 1 + CURATED = 2 + + class ResourceSpec(proto.Message): + r"""Settings for resources attached as assets within a zone. + + Attributes: + location_type (google.cloud.dataplex_v1.types.Zone.ResourceSpec.LocationType): + Required. Immutable. The location type of the + resources that are allowed to be attached to the + assets within this zone. + """ + class LocationType(proto.Enum): + r"""Location type of the resources attached to a zone. + + Values: + LOCATION_TYPE_UNSPECIFIED (0): + Unspecified location type. + SINGLE_REGION (1): + Resources that are associated with a single + region. + MULTI_REGION (2): + Resources that are associated with a + multi-region location. + """ + LOCATION_TYPE_UNSPECIFIED = 0 + SINGLE_REGION = 1 + MULTI_REGION = 2 + + location_type: 'Zone.ResourceSpec.LocationType' = proto.Field( + proto.ENUM, + number=1, + enum='Zone.ResourceSpec.LocationType', + ) + + class DiscoverySpec(proto.Message): + r"""Settings to manage the metadata discovery and publishing in a + zone. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Required. Whether discovery is enabled. + include_patterns (MutableSequence[str]): + Optional. The list of patterns to apply for + selecting data to include during discovery if + only a subset of the data should be considered. + For Cloud Storage bucket assets, these are + interpreted as glob patterns used to match + object names. For BigQuery dataset assets, these + are interpreted as patterns to match table + names. + exclude_patterns (MutableSequence[str]): + Optional. The list of patterns to apply for + selecting data to exclude during discovery. For + Cloud Storage bucket assets, these are + interpreted as glob patterns used to match + object names. For BigQuery dataset assets, these + are interpreted as patterns to match table + names. + csv_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.CsvOptions): + Optional. Configuration for CSV data. + json_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.JsonOptions): + Optional. Configuration for JSON data. + schedule (str): + Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) + for running discovery periodically. Successive discovery + runs must be scheduled at least 60 minutes apart. The + default value is to run discovery every 60 minutes. + + To explicitly set a timezone to the cron tab, apply a prefix + in the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or + "TZ=${IANA_TIME_ZONE}".
The ${IANA_TIME_ZONE} may only be a + valid string from IANA time zone database. For example, + ``CRON_TZ=America/New_York 1 * * * *``, or + ``TZ=America/New_York 1 * * * *``. + + This field is a member of `oneof`_ ``trigger``. + """ + + class CsvOptions(proto.Message): + r"""Describe CSV and similar semi-structured data formats. + + Attributes: + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. + delimiter (str): + Optional. The delimiter being used to + separate values. This defaults to ','. + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + disable_type_inference (bool): + Optional. Whether to disable the inference of + data type for CSV data. If true, all columns + will be registered as strings. + """ + + header_rows: int = proto.Field( + proto.INT32, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + encoding: str = proto.Field( + proto.STRING, + number=3, + ) + disable_type_inference: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class JsonOptions(proto.Message): + r"""Describe JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + disable_type_inference (bool): + Optional. Whether to disable the inference of + data type for Json data. If true, all columns + will be registered as their primitive types + (strings, number or boolean). + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + disable_type_inference: bool = proto.Field( + proto.BOOL, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + csv_options: 'Zone.DiscoverySpec.CsvOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='Zone.DiscoverySpec.CsvOptions', + ) + json_options: 'Zone.DiscoverySpec.JsonOptions' = proto.Field( + proto.MESSAGE, + number=5, + message='Zone.DiscoverySpec.JsonOptions', + ) + schedule: str = proto.Field( + proto.STRING, + number=10, + oneof='trigger', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=8, + enum='State', + ) + type_: Type = proto.Field( + proto.ENUM, + number=9, + enum=Type, + ) + discovery_spec: DiscoverySpec = proto.Field( + proto.MESSAGE, + number=103, + message=DiscoverySpec, + ) + resource_spec: ResourceSpec = proto.Field( + proto.MESSAGE, + number=104, + message=ResourceSpec, + ) + asset_status: 'AssetStatus' = proto.Field( + proto.MESSAGE, + number=105, + message='AssetStatus', + ) + + +class Action(proto.Message): + r"""Action represents an issue requiring administrator action for + resolution. + + This message has `oneof`_ fields (mutually exclusive fields). 
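A hedged configuration sketch for the Zone and DiscoverySpec messages defined above; the patterns, the schedule, and the RAW/SINGLE_REGION choices are illustrative placeholders:

from google.cloud import dataplex_v1

zone = dataplex_v1.Zone(
    type_=dataplex_v1.Zone.Type.RAW,
    resource_spec=dataplex_v1.Zone.ResourceSpec(
        location_type=dataplex_v1.Zone.ResourceSpec.LocationType.SINGLE_REGION,
    ),
    discovery_spec=dataplex_v1.Zone.DiscoverySpec(
        enabled=True,
        include_patterns=["raw/**"],
        csv_options=dataplex_v1.Zone.DiscoverySpec.CsvOptions(header_rows=1),
        schedule="CRON_TZ=America/New_York 0 * * * *",  # hourly discovery
    ),
)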
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + category (google.cloud.dataplex_v1.types.Action.Category): + The category of issue associated with the + action. + issue (str): + Detailed description of the issue requiring + action. + detect_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the issue was detected. + name (str): + Output only. The relative resource name of the action, of + the form: + ``projects/{project}/locations/{location}/lakes/{lake}/actions/{action}`` + ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/actions/{action}`` + ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}/actions/{action}``. + lake (str): + Output only. The relative resource name of the lake, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + zone (str): + Output only. The relative resource name of the zone, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + asset (str): + Output only. The relative resource name of the asset, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + data_locations (MutableSequence[str]): + The list of data locations associated with this action. + Cloud Storage locations are represented as URI paths(E.g. + ``gs://bucket/table1/year=2020/month=Jan/``). BigQuery + locations refer to resource names(E.g. + ``bigquery.googleapis.com/projects/project-id/datasets/dataset-id``). + invalid_data_format (google.cloud.dataplex_v1.types.Action.InvalidDataFormat): + Details for issues related to invalid or + unsupported data formats. + + This field is a member of `oneof`_ ``details``. + incompatible_data_schema (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema): + Details for issues related to incompatible + schemas detected within data. + + This field is a member of `oneof`_ ``details``. + invalid_data_partition (google.cloud.dataplex_v1.types.Action.InvalidDataPartition): + Details for issues related to invalid or + unsupported data partition structure. + + This field is a member of `oneof`_ ``details``. + missing_data (google.cloud.dataplex_v1.types.Action.MissingData): + Details for issues related to absence of data + within managed resources. + + This field is a member of `oneof`_ ``details``. + missing_resource (google.cloud.dataplex_v1.types.Action.MissingResource): + Details for issues related to absence of a + managed resource. + + This field is a member of `oneof`_ ``details``. + unauthorized_resource (google.cloud.dataplex_v1.types.Action.UnauthorizedResource): + Details for issues related to lack of + permissions to access data resources. + + This field is a member of `oneof`_ ``details``. + failed_security_policy_apply (google.cloud.dataplex_v1.types.Action.FailedSecurityPolicyApply): + Details for issues related to applying + security policy. + + This field is a member of `oneof`_ ``details``. + invalid_data_organization (google.cloud.dataplex_v1.types.Action.InvalidDataOrganization): + Details for issues related to invalid data + arrangement. + + This field is a member of `oneof`_ ``details``. + """ + class Category(proto.Enum): + r"""The category of issues. 
+
+        Values:
+            CATEGORY_UNSPECIFIED (0):
+                Unspecified category.
+            RESOURCE_MANAGEMENT (1):
+                Resource management related issues.
+            SECURITY_POLICY (2):
+                Security policy related issues.
+            DATA_DISCOVERY (3):
+                Data and discovery related issues.
+        """
+        CATEGORY_UNSPECIFIED = 0
+        RESOURCE_MANAGEMENT = 1
+        SECURITY_POLICY = 2
+        DATA_DISCOVERY = 3
+
+    class MissingResource(proto.Message):
+        r"""Action details for resource references in assets that cannot
+        be located.
+
+        """
+
+    class UnauthorizedResource(proto.Message):
+        r"""Action details for unauthorized resource issues raised to
+        indicate that the service account associated with the lake
+        instance is not authorized to access or manage the resource
+        associated with an asset.
+
+        """
+
+    class FailedSecurityPolicyApply(proto.Message):
+        r"""Failed to apply security policy to the managed resource(s)
+        under a lake, zone or an asset. For a lake or zone resource, one
+        or more underlying assets have a failure applying security policy
+        to the associated managed resource.
+
+        Attributes:
+            asset (str):
+                Resource name of one of the assets with
+                failing security policy application. Populated
+                for a lake or zone resource only.
+        """
+
+        asset: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class InvalidDataFormat(proto.Message):
+        r"""Action details for invalid or unsupported data files detected
+        by discovery.
+
+        Attributes:
+            sampled_data_locations (MutableSequence[str]):
+                The list of data locations sampled and used
+                for format/schema inference.
+            expected_format (str):
+                The expected data format of the entity.
+            new_format (str):
+                The new unexpected data format within the
+                entity.
+        """
+
+        sampled_data_locations: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=1,
+        )
+        expected_format: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        new_format: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+
+    class IncompatibleDataSchema(proto.Message):
+        r"""Action details for incompatible schemas detected by
+        discovery.
+
+        Attributes:
+            table (str):
+                The name of the table containing invalid
+                data.
+            existing_schema (str):
+                The existing and expected schema of the
+                table. The schema is provided as a JSON
+                formatted structure listing columns and data
+                types.
+            new_schema (str):
+                The new and incompatible schema within the
+                table. The schema is provided as a JSON
+                formatted structure listing columns and data
+                types.
+            sampled_data_locations (MutableSequence[str]):
+                The list of data locations sampled and used
+                for format/schema inference.
+            schema_change (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema.SchemaChange):
+                Whether the action relates to a schema that
+                is incompatible or modified.
+        """
+        class SchemaChange(proto.Enum):
+            r"""Whether the action relates to a schema that is incompatible
+            or modified.
+
+            Values:
+                SCHEMA_CHANGE_UNSPECIFIED (0):
+                    Schema change unspecified.
+                INCOMPATIBLE (1):
+                    Newly discovered schema is incompatible with
+                    existing schema.
+                MODIFIED (2):
+                    Newly discovered schema has changed from
+                    existing schema for data in a curated zone.
+ """ + SCHEMA_CHANGE_UNSPECIFIED = 0 + INCOMPATIBLE = 1 + MODIFIED = 2 + + table: str = proto.Field( + proto.STRING, + number=1, + ) + existing_schema: str = proto.Field( + proto.STRING, + number=2, + ) + new_schema: str = proto.Field( + proto.STRING, + number=3, + ) + sampled_data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + schema_change: 'Action.IncompatibleDataSchema.SchemaChange' = proto.Field( + proto.ENUM, + number=5, + enum='Action.IncompatibleDataSchema.SchemaChange', + ) + + class InvalidDataPartition(proto.Message): + r"""Action details for invalid or unsupported partitions detected + by discovery. + + Attributes: + expected_structure (google.cloud.dataplex_v1.types.Action.InvalidDataPartition.PartitionStructure): + The issue type of InvalidDataPartition. + """ + class PartitionStructure(proto.Enum): + r"""The expected partition structure. + + Values: + PARTITION_STRUCTURE_UNSPECIFIED (0): + PartitionStructure unspecified. + CONSISTENT_KEYS (1): + Consistent hive-style partition definition + (both raw and curated zone). + HIVE_STYLE_KEYS (2): + Hive style partition definition (curated zone + only). + """ + PARTITION_STRUCTURE_UNSPECIFIED = 0 + CONSISTENT_KEYS = 1 + HIVE_STYLE_KEYS = 2 + + expected_structure: 'Action.InvalidDataPartition.PartitionStructure' = proto.Field( + proto.ENUM, + number=1, + enum='Action.InvalidDataPartition.PartitionStructure', + ) + + class MissingData(proto.Message): + r"""Action details for absence of data detected by discovery. + """ + + class InvalidDataOrganization(proto.Message): + r"""Action details for invalid data arrangement. + """ + + category: Category = proto.Field( + proto.ENUM, + number=1, + enum=Category, + ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) + detect_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + lake: str = proto.Field( + proto.STRING, + number=6, + ) + zone: str = proto.Field( + proto.STRING, + number=7, + ) + asset: str = proto.Field( + proto.STRING, + number=8, + ) + data_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + invalid_data_format: InvalidDataFormat = proto.Field( + proto.MESSAGE, + number=10, + oneof='details', + message=InvalidDataFormat, + ) + incompatible_data_schema: IncompatibleDataSchema = proto.Field( + proto.MESSAGE, + number=11, + oneof='details', + message=IncompatibleDataSchema, + ) + invalid_data_partition: InvalidDataPartition = proto.Field( + proto.MESSAGE, + number=12, + oneof='details', + message=InvalidDataPartition, + ) + missing_data: MissingData = proto.Field( + proto.MESSAGE, + number=13, + oneof='details', + message=MissingData, + ) + missing_resource: MissingResource = proto.Field( + proto.MESSAGE, + number=14, + oneof='details', + message=MissingResource, + ) + unauthorized_resource: UnauthorizedResource = proto.Field( + proto.MESSAGE, + number=15, + oneof='details', + message=UnauthorizedResource, + ) + failed_security_policy_apply: FailedSecurityPolicyApply = proto.Field( + proto.MESSAGE, + number=21, + oneof='details', + message=FailedSecurityPolicyApply, + ) + invalid_data_organization: InvalidDataOrganization = proto.Field( + proto.MESSAGE, + number=22, + oneof='details', + message=InvalidDataOrganization, + ) + + +class Asset(proto.Message): + r"""An asset represents a cloud resource that is being managed + within a lake as a member of a zone. 
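Since ``Action.details`` is a oneof, at most one detail message is populated per action. A minimal sketch of dispatching on whichever member is set, assuming a configured client; the lake name is a placeholder, and proto-plus's ``pb()`` accessor is used to reach the underlying protobuf's ``WhichOneof``::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    # Placeholder lake; substitute a real resource name.
    parent = "projects/my-project/locations/us-central1/lakes/my-lake"
    for action in client.list_lake_actions(parent=parent):
        # Ask the raw protobuf which ``details`` member is set, if any.
        detail = dataplex_v1.Action.pb(action).WhichOneof("details")
        print(action.category, action.issue, detail)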
+ + Attributes: + name (str): + Output only. The relative resource name of the asset, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the asset. This ID will be different if + the asset is deleted and re-created with the + same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the asset was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the asset was last + updated. + labels (MutableMapping[str, str]): + Optional. User defined labels for the asset. + description (str): + Optional. Description of the asset. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the asset. + resource_spec (google.cloud.dataplex_v1.types.Asset.ResourceSpec): + Required. Specification of the resource that + is referenced by this asset. + resource_status (google.cloud.dataplex_v1.types.Asset.ResourceStatus): + Output only. Status of the resource + referenced by this asset. + security_status (google.cloud.dataplex_v1.types.Asset.SecurityStatus): + Output only. Status of the security policy + applied to resource referenced by this asset. + discovery_spec (google.cloud.dataplex_v1.types.Asset.DiscoverySpec): + Optional. Specification of the discovery + feature applied to data referenced by this + asset. When this spec is left unset, the asset + will use the spec set on the parent zone. + discovery_status (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus): + Output only. Status of the discovery feature + applied to data referenced by this asset. + """ + + class SecurityStatus(proto.Message): + r"""Security policy status of the asset. Data security policy, + i.e., readers, writers & owners, should be specified in the + lake/zone/asset IAM policy. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.SecurityStatus.State): + The current state of the security policy + applied to the attached resource. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + """ + class State(proto.Enum): + r"""The state of the security policy. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + READY (1): + Security policy has been successfully applied + to the attached resource. + APPLYING (2): + Security policy is in the process of being + applied to the attached resource. + ERROR (3): + Security policy could not be applied to the + attached resource due to errors. + """ + STATE_UNSPECIFIED = 0 + READY = 1 + APPLYING = 2 + ERROR = 3 + + state: 'Asset.SecurityStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.SecurityStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class DiscoverySpec(proto.Message): + r"""Settings to manage the metadata discovery and publishing for + an asset. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Optional. Whether discovery is enabled. + include_patterns (MutableSequence[str]): + Optional. 
The list of patterns to apply for
+                selecting data to include during discovery if
+                only a subset of the data should be considered.
+                For Cloud Storage bucket assets, these are
+                interpreted as glob patterns used to match
+                object names. For BigQuery dataset assets, these
+                are interpreted as patterns to match table
+                names.
+            exclude_patterns (MutableSequence[str]):
+                Optional. The list of patterns to apply for
+                selecting data to exclude during discovery. For
+                Cloud Storage bucket assets, these are
+                interpreted as glob patterns used to match
+                object names. For BigQuery dataset assets, these
+                are interpreted as patterns to match table
+                names.
+            csv_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.CsvOptions):
+                Optional. Configuration for CSV data.
+            json_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.JsonOptions):
+                Optional. Configuration for Json data.
+            schedule (str):
+                Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron)
+                for running discovery periodically. Successive discovery
+                runs must be scheduled at least 60 minutes apart. The
+                default value is to run discovery every 60 minutes.
+
+                To explicitly set a time zone for the cron tab, apply a
+                prefix in the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or
+                "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} must be a
+                valid string from the IANA time zone database. For example,
+                ``CRON_TZ=America/New_York 1 * * * *``, or
+                ``TZ=America/New_York 1 * * * *``.
+
+                This field is a member of `oneof`_ ``trigger``.
+        """
+
+        class CsvOptions(proto.Message):
+            r"""Describe CSV and similar semi-structured data formats.
+
+            Attributes:
+                header_rows (int):
+                    Optional. The number of rows to interpret as
+                    header rows that should be skipped when reading
+                    data rows.
+                delimiter (str):
+                    Optional. The delimiter being used to
+                    separate values. This defaults to ','.
+                encoding (str):
+                    Optional. The character encoding of the data.
+                    The default is UTF-8.
+                disable_type_inference (bool):
+                    Optional. Whether to disable the inference of
+                    data type for CSV data. If true, all columns
+                    will be registered as strings.
+            """
+
+            header_rows: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            delimiter: str = proto.Field(
+                proto.STRING,
+                number=2,
+            )
+            encoding: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            disable_type_inference: bool = proto.Field(
+                proto.BOOL,
+                number=4,
+            )
+
+        class JsonOptions(proto.Message):
+            r"""Describe JSON data format.
+
+            Attributes:
+                encoding (str):
+                    Optional. The character encoding of the data.
+                    The default is UTF-8.
+                disable_type_inference (bool):
+                    Optional. Whether to disable the inference of
+                    data type for Json data. If true, all columns
+                    will be registered as their primitive types
+                    (strings, number or boolean).
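A sketch of how these discovery options compose on an asset; every value below is illustrative, and ``schedule`` occupies the ``trigger`` oneof::

    from google.cloud import dataplex_v1

    spec = dataplex_v1.Asset.DiscoverySpec(
        enabled=True,
        include_patterns=["raw/*"],  # illustrative glob
        csv_options=dataplex_v1.Asset.DiscoverySpec.CsvOptions(
            header_rows=1,
            delimiter=",",
        ),
        json_options=dataplex_v1.Asset.DiscoverySpec.JsonOptions(
            encoding="UTF-8",
        ),
        # ``schedule`` is one member of the ``trigger`` oneof.
        schedule="CRON_TZ=America/New_York 0 */2 * * *",
    )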
+ """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + disable_type_inference: bool = proto.Field( + proto.BOOL, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + csv_options: 'Asset.DiscoverySpec.CsvOptions' = proto.Field( + proto.MESSAGE, + number=4, + message='Asset.DiscoverySpec.CsvOptions', + ) + json_options: 'Asset.DiscoverySpec.JsonOptions' = proto.Field( + proto.MESSAGE, + number=5, + message='Asset.DiscoverySpec.JsonOptions', + ) + schedule: str = proto.Field( + proto.STRING, + number=10, + oneof='trigger', + ) + + class ResourceSpec(proto.Message): + r"""Identifies the cloud resource that is referenced by this + asset. + + Attributes: + name (str): + Immutable. Relative name of the cloud resource that contains + the data that is being managed within a lake. For example: + ``projects/{project_number}/buckets/{bucket_id}`` + ``projects/{project_number}/datasets/{dataset_id}`` + type_ (google.cloud.dataplex_v1.types.Asset.ResourceSpec.Type): + Required. Immutable. Type of resource. + read_access_mode (google.cloud.dataplex_v1.types.Asset.ResourceSpec.AccessMode): + Optional. Determines how read permissions are + handled for each asset and their associated + tables. Only available to storage buckets + assets. + """ + class Type(proto.Enum): + r"""Type of resource. + + Values: + TYPE_UNSPECIFIED (0): + Type not specified. + STORAGE_BUCKET (1): + Cloud Storage bucket. + BIGQUERY_DATASET (2): + BigQuery dataset. + """ + TYPE_UNSPECIFIED = 0 + STORAGE_BUCKET = 1 + BIGQUERY_DATASET = 2 + + class AccessMode(proto.Enum): + r"""Access Mode determines how data stored within the resource is + read. This is only applicable to storage bucket assets. + + Values: + ACCESS_MODE_UNSPECIFIED (0): + Access mode unspecified. + DIRECT (1): + Default. Data is accessed directly using + storage APIs. + MANAGED (2): + Data is accessed through a managed interface + using BigQuery APIs. + """ + ACCESS_MODE_UNSPECIFIED = 0 + DIRECT = 1 + MANAGED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'Asset.ResourceSpec.Type' = proto.Field( + proto.ENUM, + number=2, + enum='Asset.ResourceSpec.Type', + ) + read_access_mode: 'Asset.ResourceSpec.AccessMode' = proto.Field( + proto.ENUM, + number=5, + enum='Asset.ResourceSpec.AccessMode', + ) + + class ResourceStatus(proto.Message): + r"""Status of the resource referenced by an asset. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.ResourceStatus.State): + The current state of the managed resource. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + managed_access_identity (str): + Output only. Service account associated with + the BigQuery Connection. + """ + class State(proto.Enum): + r"""The state of a resource. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + READY (1): + Resource does not have any errors. + ERROR (2): + Resource has errors. 
+ """ + STATE_UNSPECIFIED = 0 + READY = 1 + ERROR = 2 + + state: 'Asset.ResourceStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.ResourceStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + managed_access_identity: str = proto.Field( + proto.STRING, + number=4, + ) + + class DiscoveryStatus(proto.Message): + r"""Status of discovery for an asset. + + Attributes: + state (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.State): + The current status of the discovery feature. + message (str): + Additional information about the current + state. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of the status. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of the last discovery run. + stats (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.Stats): + Data Stats of the asset reported by + discovery. + last_run_duration (google.protobuf.duration_pb2.Duration): + The duration of the last discovery run. + """ + class State(proto.Enum): + r"""Current state of discovery. + + Values: + STATE_UNSPECIFIED (0): + State is unspecified. + SCHEDULED (1): + Discovery for the asset is scheduled. + IN_PROGRESS (2): + Discovery for the asset is running. + PAUSED (3): + Discovery for the asset is currently paused + (e.g. due to a lack of available resources). It + will be automatically resumed. + DISABLED (5): + Discovery for the asset is disabled. + """ + STATE_UNSPECIFIED = 0 + SCHEDULED = 1 + IN_PROGRESS = 2 + PAUSED = 3 + DISABLED = 5 + + class Stats(proto.Message): + r"""The aggregated data statistics for the asset reported by + discovery. + + Attributes: + data_items (int): + The count of data items within the referenced + resource. + data_size (int): + The number of stored data bytes within the + referenced resource. + tables (int): + The count of table entities within the + referenced resource. + filesets (int): + The count of fileset entities within the + referenced resource. 
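These counters can be read straight off a fetched asset. A small sketch, with a placeholder asset name::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    asset = client.get_asset(
        name="projects/123/locations/us-central1/lakes/my-lake"
        "/zones/my-zone/assets/my-asset"  # placeholder name
    )
    s = asset.discovery_status.stats
    print(f"{s.tables} tables, {s.filesets} filesets, "
          f"{s.data_size} bytes in {s.data_items} items")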
+ """ + + data_items: int = proto.Field( + proto.INT64, + number=1, + ) + data_size: int = proto.Field( + proto.INT64, + number=2, + ) + tables: int = proto.Field( + proto.INT64, + number=3, + ) + filesets: int = proto.Field( + proto.INT64, + number=4, + ) + + state: 'Asset.DiscoveryStatus.State' = proto.Field( + proto.ENUM, + number=1, + enum='Asset.DiscoveryStatus.State', + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + stats: 'Asset.DiscoveryStatus.Stats' = proto.Field( + proto.MESSAGE, + number=6, + message='Asset.DiscoveryStatus.Stats', + ) + last_run_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=8, + enum='State', + ) + resource_spec: ResourceSpec = proto.Field( + proto.MESSAGE, + number=100, + message=ResourceSpec, + ) + resource_status: ResourceStatus = proto.Field( + proto.MESSAGE, + number=101, + message=ResourceStatus, + ) + security_status: SecurityStatus = proto.Field( + proto.MESSAGE, + number=103, + message=SecurityStatus, + ) + discovery_spec: DiscoverySpec = proto.Field( + proto.MESSAGE, + number=106, + message=DiscoverySpec, + ) + discovery_status: DiscoveryStatus = proto.Field( + proto.MESSAGE, + number=107, + message=DiscoveryStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py new file mode 100644 index 000000000000..77accbfca7d7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
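To tie the resource types above together, a hedged sketch of attaching a Cloud Storage bucket to a zone; all IDs and names are placeholders, and ``create_asset`` returns a long-running operation::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.create_asset(
        parent="projects/123/locations/us-central1/lakes/my-lake/zones/raw-zone",
        asset_id="my-bucket-asset",
        asset=dataplex_v1.Asset(
            resource_spec=dataplex_v1.Asset.ResourceSpec(
                name="projects/123/buckets/my-bucket",  # immutable resource name
                type_=dataplex_v1.Asset.ResourceSpec.Type.STORAGE_BUCKET,
            ),
        ),
    )
    asset = operation.result()  # blocks until the asset is created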
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.dataplex.v1',
+    manifest={
+        'ResourceAccessSpec',
+        'DataAccessSpec',
+    },
+)
+
+
+class ResourceAccessSpec(proto.Message):
+    r"""ResourceAccessSpec holds the access control configuration to
+    be enforced on the resources, for example, Cloud Storage bucket,
+    BigQuery dataset, BigQuery table.
+
+    Attributes:
+        readers (MutableSequence[str]):
+            Optional. The set of principals to be
+            granted reader role on the resource. The
+            strings follow the IAM binding member format:
+            user:{email}, serviceAccount:{email}, or
+            group:{email}.
+        writers (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            writer role on the resource.
+        owners (MutableSequence[str]):
+            Optional. The set of principals to be granted
+            owner role on the resource.
+    """
+
+    readers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    writers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    owners: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DataAccessSpec(proto.Message):
+    r"""DataAccessSpec holds the access control configuration to be
+    enforced on data stored within resources (e.g., rows and columns
+    in BigQuery tables). When associated with data, the data is only
+    accessible to principals explicitly granted access through the
+    DataAccessSpec. Principals with access to the containing
+    resource are not implicitly granted access.
+
+    Attributes:
+        readers (MutableSequence[str]):
+            Optional. The set of principals to be
+            granted reader role on data stored within
+            resources. The strings follow the IAM binding
+            member format: user:{email},
+            serviceAccount:{email}, or group:{email}.
+    """
+
+    readers: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py
new file mode 100644
index 000000000000..a790937d97ce
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py
@@ -0,0 +1,1395 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
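The principal strings above reuse the IAM binding member format. An illustrative construction, with placeholder emails::

    from google.cloud import dataplex_v1

    access = dataplex_v1.ResourceAccessSpec(
        readers=["group:analysts@example.com"],
        writers=["serviceAccount:etl@my-project.iam.gserviceaccount.com"],
        owners=["user:admin@example.com"],
    )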
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import resources +from google.cloud.dataplex_v1.types import tasks as gcd_tasks +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'CreateLakeRequest', + 'UpdateLakeRequest', + 'DeleteLakeRequest', + 'ListLakesRequest', + 'ListLakesResponse', + 'ListLakeActionsRequest', + 'ListActionsResponse', + 'GetLakeRequest', + 'CreateZoneRequest', + 'UpdateZoneRequest', + 'DeleteZoneRequest', + 'ListZonesRequest', + 'ListZonesResponse', + 'ListZoneActionsRequest', + 'GetZoneRequest', + 'CreateAssetRequest', + 'UpdateAssetRequest', + 'DeleteAssetRequest', + 'ListAssetsRequest', + 'ListAssetsResponse', + 'ListAssetActionsRequest', + 'GetAssetRequest', + 'OperationMetadata', + 'CreateTaskRequest', + 'UpdateTaskRequest', + 'DeleteTaskRequest', + 'ListTasksRequest', + 'ListTasksResponse', + 'GetTaskRequest', + 'GetJobRequest', + 'RunTaskRequest', + 'RunTaskResponse', + 'ListJobsRequest', + 'ListJobsResponse', + 'CancelJobRequest', + 'CreateEnvironmentRequest', + 'UpdateEnvironmentRequest', + 'DeleteEnvironmentRequest', + 'ListEnvironmentsRequest', + 'ListEnvironmentsResponse', + 'GetEnvironmentRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + }, +) + + +class CreateLakeRequest(proto.Message): + r"""Create lake request. + + Attributes: + parent (str): + Required. The resource name of the lake location, of the + form: projects/{project_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + lake_id (str): + Required. Lake identifier. This ID will be used to generate + names such as database and dataset names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the customer project / location. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Lake resource + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + lake_id: str = proto.Field( + proto.STRING, + number=2, + ) + lake: resources.Lake = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Lake, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateLakeRequest(proto.Message): + r"""Update lake request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + lake (google.cloud.dataplex_v1.types.Lake): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + lake: resources.Lake = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Lake, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteLakeRequest(proto.Message): + r"""Delete lake request. + + Attributes: + name (str): + Required. 
The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLakesRequest(proto.Message): + r"""List lakes request. + + Attributes: + parent (str): + Required. The resource name of the lake location, of the + form: ``projects/{project_number}/locations/{location_id}`` + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. Maximum number of Lakes to return. + The service may return fewer than this value. If + unspecified, at most 10 lakes will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListLakes`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListLakes`` + must match the call that provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLakesResponse(proto.Message): + r"""List lakes response. + + Attributes: + lakes (MutableSequence[google.cloud.dataplex_v1.types.Lake]): + Lakes under the given parent location. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable_locations (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + lakes: MutableSequence[resources.Lake] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Lake, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListLakeActionsRequest(proto.Message): + r"""List lake actions request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of actions to + return. The service may return fewer than this + value. If unspecified, at most 10 actions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListLakeActions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListLakeActions`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListActionsResponse(proto.Message): + r"""List actions response. + + Attributes: + actions (MutableSequence[google.cloud.dataplex_v1.types.Action]): + Actions under the given parent + lake/zone/asset. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
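The ``page_token``/``next_page_token`` pair drives pagination for all of these list RPCs; the generated pagers resolve the token internally, so explicit plumbing is rarely needed. A sketch with a placeholder parent::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    request = dataplex_v1.ListLakesRequest(
        parent="projects/my-project/locations/us-central1",  # placeholder
        page_size=100,
    )
    # The pager follows ``next_page_token`` across pages transparently.
    for lake in client.list_lakes(request=request):
        print(lake.name, lake.state)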
+ """ + + @property + def raw_page(self): + return self + + actions: MutableSequence[resources.Action] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Action, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLakeRequest(proto.Message): + r"""Get lake request. + + Attributes: + name (str): + Required. The resource name of the lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateZoneRequest(proto.Message): + r"""Create zone request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + zone_id (str): + Required. Zone identifier. This ID will be used to generate + names such as database and dataset names when publishing + metadata to Hive Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique across all lakes from all locations in a + project. + - Must not be one of the reserved IDs (i.e. "default", + "global-temp") + zone (google.cloud.dataplex_v1.types.Zone): + Required. Zone resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + zone_id: str = proto.Field( + proto.STRING, + number=2, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Zone, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateZoneRequest(proto.Message): + r"""Update zone request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + zone (google.cloud.dataplex_v1.types.Zone): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + zone: resources.Zone = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Zone, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteZoneRequest(proto.Message): + r"""Delete zone request. + + Attributes: + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListZonesRequest(proto.Message): + r"""List zones request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of zones to return. + The service may return fewer than this value. If + unspecified, at most 10 zones will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListZones`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListZones`` + must match the call that provided the page token. + filter (str): + Optional. Filter request. 
+ order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListZonesResponse(proto.Message): + r"""List zones response. + + Attributes: + zones (MutableSequence[google.cloud.dataplex_v1.types.Zone]): + Zones under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + zones: MutableSequence[resources.Zone] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Zone, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListZoneActionsRequest(proto.Message): + r"""List zone actions request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + page_size (int): + Optional. Maximum number of actions to + return. The service may return fewer than this + value. If unspecified, at most 10 actions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListZoneActions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListZoneActions`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetZoneRequest(proto.Message): + r"""Get zone request. + + Attributes: + name (str): + Required. The resource name of the zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAssetRequest(proto.Message): + r"""Create asset request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + asset_id (str): + Required. Asset identifier. This ID will be used to generate + names such as table names when publishing metadata to Hive + Metastore and BigQuery. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must end with a number or a letter. + - Must be between 1-63 characters. + - Must be unique within the zone. + asset (google.cloud.dataplex_v1.types.Asset): + Required. Asset resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + asset_id: str = proto.Field( + proto.STRING, + number=2, + ) + asset: resources.Asset = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Asset, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateAssetRequest(proto.Message): + r"""Update asset request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+        asset (google.cloud.dataplex_v1.types.Asset):
+            Required. Update description. Only fields specified in
+            ``update_mask`` are updated.
+        validate_only (bool):
+            Optional. Only validate the request, but do
+            not perform mutations. The default is false.
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    asset: resources.Asset = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=resources.Asset,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class DeleteAssetRequest(proto.Message):
+    r"""Delete asset request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the asset:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListAssetsRequest(proto.Message):
+    r"""List assets request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent zone:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``.
+        page_size (int):
+            Optional. Maximum number of assets to return.
+            The service may return fewer than this value. If
+            unspecified, at most 10 assets will be returned.
+            The maximum value is 1000; values above 1000
+            will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous ``ListAssets``
+            call. Provide this to retrieve the subsequent page. When
+            paginating, all other parameters provided to ``ListAssets``
+            must match the call that provided the page token.
+        filter (str):
+            Optional. Filter request.
+        order_by (str):
+            Optional. Order by fields for the result.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListAssetsResponse(proto.Message):
+    r"""List assets response.
+
+    Attributes:
+        assets (MutableSequence[google.cloud.dataplex_v1.types.Asset]):
+            Assets under the given parent zone.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    assets: MutableSequence[resources.Asset] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=resources.Asset,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListAssetActionsRequest(proto.Message):
+    r"""List asset actions request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent asset:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``.
+        page_size (int):
+            Optional. Maximum number of actions to
+            return. The service may return fewer than this
+            value. If unspecified, at most 10 actions will
+            be returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous
+            ``ListAssetActions`` call. Provide this to retrieve the
+            subsequent page. When paginating, all other parameters
+            provided to ``ListAssetActions`` must match the call that
+            provided the page token.
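Update requests in this file pair the resource with a ``FieldMask``; only masked paths are written. A minimal sketch for ``UpdateAsset``, with placeholder name and description::

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.DataplexServiceClient()
    operation = client.update_asset(
        asset=dataplex_v1.Asset(
            name="projects/123/locations/us-central1/lakes/my-lake"
            "/zones/raw-zone/assets/my-asset",  # placeholder
            description="Curated sales data",
        ),
        # Only paths named here are written; other fields are untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    updated = operation.result()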
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetAssetRequest(proto.Message): + r"""Get asset request. + + Attributes: + name (str): + Required. The resource name of the asset: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CreateTaskRequest(proto.Message): + r"""Create task request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. + task_id (str): + Required. Task identifier. + task (google.cloud.dataplex_v1.types.Task): + Required. Task resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + task: gcd_tasks.Task = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_tasks.Task, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateTaskRequest(proto.Message): + r"""Update task request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + task (google.cloud.dataplex_v1.types.Task): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
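``OperationMetadata`` is surfaced on the long-running operations these RPCs return. A sketch of inspecting it during a ``CreateLake`` call; the identifiers are placeholders::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    operation = client.create_lake(
        parent="projects/my-project/locations/us-central1",  # placeholder
        lake_id="my-lake",
        lake=dataplex_v1.Lake(display_name="My Lake"),
    )
    # Deserialized OperationMetadata rides along with the LRO.
    print(operation.metadata.verb, operation.metadata.target)
    lake = operation.result()  # blocks until the lake is ready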
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    task: gcd_tasks.Task = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcd_tasks.Task,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class DeleteTaskRequest(proto.Message):
+    r"""Delete task request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the task:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListTasksRequest(proto.Message):
+    r"""List tasks request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent lake:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``.
+        page_size (int):
+            Optional. Maximum number of tasks to return.
+            The service may return fewer than this value. If
+            unspecified, at most 10 tasks will be returned.
+            The maximum value is 1000; values above 1000
+            will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous ``ListTasks``
+            call. Provide this to retrieve the subsequent page. When
+            paginating, all other parameters provided to ``ListTasks``
+            must match the call that provided the page token.
+        filter (str):
+            Optional. Filter request.
+        order_by (str):
+            Optional. Order by fields for the result.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListTasksResponse(proto.Message):
+    r"""List tasks response.
+
+    Attributes:
+        tasks (MutableSequence[google.cloud.dataplex_v1.types.Task]):
+            Tasks under the given parent lake.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+        unreachable_locations (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    tasks: MutableSequence[gcd_tasks.Task] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcd_tasks.Task,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable_locations: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetTaskRequest(proto.Message):
+    r"""Get task request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the task:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class GetJobRequest(proto.Message):
+    r"""Get job request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the job:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class RunTaskRequest(proto.Message):
+    r"""Run task request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the task:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
+        labels (MutableMapping[str, str]):
+            Optional. User-defined labels for the task.
+            If the map is left empty, the task will run with
+            existing labels from task definition.
If the map + contains an entry with a new key, the same will + be added to existing set of labels. If the map + contains an entry with an existing label key in + task definition, the task will run with new + label value for that entry. Clearing an existing + label will require label value to be explicitly + set to a hyphen "-". The label value cannot be + empty. + args (MutableMapping[str, str]): + Optional. Execution spec arguments. If the + map is left empty, the task will run with + existing execution spec args from task + definition. If the map contains an entry with a + new key, the same will be added to existing set + of args. If the map contains an entry with an + existing arg key in task definition, the task + will run with new arg value for that entry. + Clearing an existing arg will require arg value + to be explicitly set to a hyphen "-". The arg + value cannot be empty. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + args: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class RunTaskResponse(proto.Message): + r""" + + Attributes: + job (google.cloud.dataplex_v1.types.Job): + Jobs created by RunTask API. + """ + + job: gcd_tasks.Job = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_tasks.Job, + ) + + +class ListJobsRequest(proto.Message): + r"""List jobs request. + + Attributes: + parent (str): + Required. The resource name of the parent environment: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. + page_size (int): + Optional. Maximum number of jobs to return. + The service may return fewer than this value. If + unspecified, at most 10 jobs will be returned. + The maximum value is 1000; values above 1000 + will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous ``ListJobs`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to ``ListJobs`` + must match the call that provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListJobsResponse(proto.Message): + r"""List jobs response. + + Attributes: + jobs (MutableSequence[google.cloud.dataplex_v1.types.Job]): + Jobs under a given task. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence[gcd_tasks.Job] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_tasks.Job, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelJobRequest(proto.Message): + r"""Cancel task jobs. + + Attributes: + name (str): + Required. The resource name of the job: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}/job/{job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEnvironmentRequest(proto.Message): + r"""Create environment request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + environment_id (str): + Required. Environment identifier. 
+ + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + environment_id: str = proto.Field( + proto.STRING, + number=2, + ) + environment: analyze.Environment = proto.Field( + proto.MESSAGE, + number=3, + message=analyze.Environment, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateEnvironmentRequest(proto.Message): + r"""Update environment request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + environment: analyze.Environment = proto.Field( + proto.MESSAGE, + number=2, + message=analyze.Environment, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteEnvironmentRequest(proto.Message): + r"""Delete environment request. + + Attributes: + name (str): + Required. The resource name of the environment: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnvironmentsRequest(proto.Message): + r"""List environments request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. + page_size (int): + Optional. Maximum number of environments to + return. The service may return fewer than this + value. If unspecified, at most 10 environments + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEnvironments`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListEnvironments`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListEnvironmentsResponse(proto.Message): + r"""List environments response. + + Attributes: + environments (MutableSequence[google.cloud.dataplex_v1.types.Environment]): + Environments under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
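A sketch of ``RunTask`` with the override semantics described above, where a hyphen clears an inherited value; the task name and argument keys are placeholders::

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    response = client.run_task(
        request=dataplex_v1.RunTaskRequest(
            name="projects/123/locations/us-central1/lakes/my-lake"
            "/tasks/my-task",  # placeholder
            # Merged over the task definition; "-" clears a value.
            args={"ENV": "staging", "OLD_FLAG": "-"},
        )
    )
    print(response.job.name)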
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    environments: MutableSequence[analyze.Environment] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=analyze.Environment,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetEnvironmentRequest(proto.Message):
+    r"""Get environment request.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the environment:
+            ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListSessionsRequest(proto.Message):
+    r"""List sessions request.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent environment:
+            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``.
+        page_size (int):
+            Optional. Maximum number of sessions to
+            return. The service may return fewer than this
+            value. If unspecified, at most 10 sessions will
+            be returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            Optional. Page token received from a previous
+            ``ListSessions`` call. Provide this to retrieve the
+            subsequent page. When paginating, all other parameters
+            provided to ``ListSessions`` must match the call that
+            provided the page token.
+        filter (str):
+            Optional. Filter request. The ``mode`` filter is supported:
+            when the mode is USER, only the sessions belonging to the
+            requester are returned; when the mode is ADMIN, sessions of
+            all users are returned. When no filter is sent, the request
+            defaults to USER mode. NOTE: When the mode is ADMIN, the
+            requester should have the
+            ``dataplex.environments.listAllSessions`` permission to list
+            all sessions; in absence of the permission, the request
+            fails.
+
+            mode = ADMIN \| USER
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListSessionsResponse(proto.Message):
+    r"""List sessions response.
+
+    Attributes:
+        sessions (MutableSequence[google.cloud.dataplex_v1.types.Session]):
+            Sessions under a given environment.
+        next_page_token (str):
+            Token to retrieve the next page of results,
+            or empty if there are no more results in the
+            list.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    sessions: MutableSequence[analyze.Session] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=analyze.Session,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py
new file mode 100644
index 000000000000..99cff935610f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py
@@ -0,0 +1,753 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataplex_v1.types import resources +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.dataplex.v1', + manifest={ + 'Task', + 'Job', + }, +) + + +class Task(proto.Message): + r"""A task represents a user-visible job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the task, of the + form: + projects/{project_number}/locations/{location_id}/lakes/{lake_id}/ + tasks/{task_id}. + uid (str): + Output only. System generated globally unique + ID for the task. This ID will be different if + the task is deleted and re-created with the same + name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the task was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the task was last + updated. + description (str): + Optional. Description of the task. + display_name (str): + Optional. User friendly display name. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the task. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the task. + trigger_spec (google.cloud.dataplex_v1.types.Task.TriggerSpec): + Required. Spec related to how often and when + a task should be triggered. + execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): + Required. Spec related to how a task is + executed. + execution_status (google.cloud.dataplex_v1.types.Task.ExecutionStatus): + Output only. Status of the latest task + executions. + spark (google.cloud.dataplex_v1.types.Task.SparkTaskConfig): + Config related to running custom Spark tasks. + + This field is a member of `oneof`_ ``config``. + notebook (google.cloud.dataplex_v1.types.Task.NotebookTaskConfig): + Config related to running scheduled + Notebooks. + + This field is a member of `oneof`_ ``config``. + """ + + class InfrastructureSpec(proto.Message): + r"""Configuration for the underlying infrastructure used to run + workloads. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.BatchComputeResources): + Compute resources needed for a Task when + using Dataproc Serverless. + + This field is a member of `oneof`_ ``resources``. + container_image (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.ContainerImageRuntime): + Container Image Runtime Configuration. + + This field is a member of `oneof`_ ``runtime``. 
+ vpc_network (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.VpcNetwork): + VPC network. + + This field is a member of `oneof`_ ``network``. + """ + + class BatchComputeResources(proto.Message): + r"""Batch compute resources associated with the task. + + Attributes: + executors_count (int): + Optional. Total number of job executors. Executor Count + should be between 2 and 100. [Default=2] + max_executors_count (int): + Optional. Max configurable executors. If max_executors_count + > executors_count, then auto-scaling is enabled. Max + Executor Count should be between 2 and 1000. [Default=1000] + """ + + executors_count: int = proto.Field( + proto.INT32, + number=1, + ) + max_executors_count: int = proto.Field( + proto.INT32, + number=2, + ) + + class ContainerImageRuntime(proto.Message): + r"""Container Image Runtime Configuration used with Batch + execution. + + Attributes: + image (str): + Optional. Container image to use. + java_jars (MutableSequence[str]): + Optional. A list of Java JARs to add to the + classpath. Valid input includes Cloud Storage + URIs to JAR binaries. For example, + gs://bucket-name/my/path/to/file.jar + python_packages (MutableSequence[str]): + Optional. A list of Python packages to be + installed. Valid formats include a Cloud Storage + URI to a pip installable library. For example, + gs://bucket-name/my/path/to/lib.tar.gz + properties (MutableMapping[str, str]): + Optional. Overrides to the common configuration of open + source components installed on the Dataproc cluster. These + are the properties to set on daemon config files. Property + keys are specified in ``prefix:property`` format, for example + ``core:hadoop.tmp.dir``. For more information, see `Cluster + properties <https://cloud.google.com/dataproc/docs/concepts/cluster-properties>`__. + """ + + image: str = proto.Field( + proto.STRING, + number=1, + ) + java_jars: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + python_packages: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + class VpcNetwork(proto.Message): + r"""Cloud VPC Network used to run the infrastructure. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Optional. The Cloud VPC network in which the + job is run. By default, the Cloud VPC network + named Default within the project is used. + + This field is a member of `oneof`_ ``network_name``. + sub_network (str): + Optional. The Cloud VPC sub-network in which + the job is run. + + This field is a member of `oneof`_ ``network_name``. + network_tags (MutableSequence[str]): + Optional. List of network tags to apply to + the job.
+ """ + + network: str = proto.Field( + proto.STRING, + number=1, + oneof='network_name', + ) + sub_network: str = proto.Field( + proto.STRING, + number=2, + oneof='network_name', + ) + network_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + batch: 'Task.InfrastructureSpec.BatchComputeResources' = proto.Field( + proto.MESSAGE, + number=52, + oneof='resources', + message='Task.InfrastructureSpec.BatchComputeResources', + ) + container_image: 'Task.InfrastructureSpec.ContainerImageRuntime' = proto.Field( + proto.MESSAGE, + number=101, + oneof='runtime', + message='Task.InfrastructureSpec.ContainerImageRuntime', + ) + vpc_network: 'Task.InfrastructureSpec.VpcNetwork' = proto.Field( + proto.MESSAGE, + number=150, + oneof='network', + message='Task.InfrastructureSpec.VpcNetwork', + ) + + class TriggerSpec(proto.Message): + r"""Task scheduling and trigger settings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dataplex_v1.types.Task.TriggerSpec.Type): + Required. Immutable. Trigger type of the + user-specified Task. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The first run of the task will be after this time. + If not specified, the task will run shortly after being + submitted if ON_DEMAND and based on the schedule if + RECURRING. + disabled (bool): + Optional. Prevent the task from executing. + This does not cancel already running tasks. It + is intended to temporarily disable RECURRING + tasks. + max_retries (int): + Optional. Number of retry attempts before + aborting. Set to zero to never attempt to retry + a failed task. + schedule (str): + Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) + for running tasks periodically. To explicitly set a timezone + to the cron tab, apply a prefix in the cron tab: + "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The + ${IANA_TIME_ZONE} may only be a valid string from IANA time + zone database. For example, + ``CRON_TZ=America/New_York 1 * * * *``, or + ``TZ=America/New_York 1 * * * *``. This field is required + for RECURRING tasks. + + This field is a member of `oneof`_ ``trigger``. + """ + class Type(proto.Enum): + r"""Determines how often and when the job will run. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified trigger type. + ON_DEMAND (1): + The task runs one-time shortly after Task + Creation. + RECURRING (2): + The task is scheduled to run periodically. + """ + TYPE_UNSPECIFIED = 0 + ON_DEMAND = 1 + RECURRING = 2 + + type_: 'Task.TriggerSpec.Type' = proto.Field( + proto.ENUM, + number=5, + enum='Task.TriggerSpec.Type', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + max_retries: int = proto.Field( + proto.INT32, + number=7, + ) + schedule: str = proto.Field( + proto.STRING, + number=100, + oneof='trigger', + ) + + class ExecutionSpec(proto.Message): + r"""Execution related settings, like retry and service_account. + + Attributes: + args (MutableMapping[str, str]): + Optional. The arguments to pass to the task. The args can + use placeholders of the format ${placeholder} as part of + key/value string. These will be interpolated before passing + the args to the driver. Currently supported placeholders: + + - ${task_id} + - ${job_time} To pass positional args, set the key as + TASK_ARGS. 
The value should be a comma-separated string of + all the positional arguments. To use a delimiter other + than comma, refer to + https://cloud.google.com/sdk/gcloud/reference/topic/escaping. + If other keys are present in the args, + TASK_ARGS will be passed as the last argument. + service_account (str): + Required. Service account to use to execute a + task. If not provided, the default Compute + service account for the project is used. + project (str): + Optional. The project in which jobs are run. By default, the + project containing the Lake is used. If a project is + provided, the + [ExecutionSpec.service_account][google.cloud.dataplex.v1.Task.ExecutionSpec.service_account] + must belong to this project. + max_job_execution_lifetime (google.protobuf.duration_pb2.Duration): + Optional. The maximum duration after which + the job execution expires. + kms_key (str): + Optional. The Cloud KMS key to use for encryption, of the + form: + ``projects/{project_number}/locations/{location_id}/keyRings/{key-ring-name}/cryptoKeys/{key-name}``. + """ + + args: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + service_account: str = proto.Field( + proto.STRING, + number=5, + ) + project: str = proto.Field( + proto.STRING, + number=7, + ) + max_job_execution_lifetime: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + kms_key: str = proto.Field( + proto.STRING, + number=9, + ) + + class SparkTaskConfig(proto.Message): + r"""User-specified config for running a Spark task. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The Cloud Storage URI of the jar file that contains the main + class. The execution args are passed in as a sequence of + named process arguments (``--key=value``). + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file that + contains the class must be in the default CLASSPATH or + specified in ``jar_file_uris``. The execution args are + passed in as a sequence of named process arguments + (``--key=value``). + + This field is a member of `oneof`_ ``driver``. + python_script_file (str): + The Cloud Storage URI of the main Python file to use as the + driver. Must be a .py file. The execution args are passed in + as a sequence of named process arguments (``--key=value``). + + This field is a member of `oneof`_ ``driver``. + sql_script_file (str): + A reference to a query file. This should be the Cloud + Storage URI of the query file. The execution args are used + to declare a set of script variables (``set key="value";``). + + This field is a member of `oneof`_ ``driver``. + sql_script (str): + The query text. The execution args are used to declare a set + of script variables (``set key="value";``). + + This field is a member of `oneof`_ ``driver``. + file_uris (MutableSequence[str]): + Optional. Cloud Storage URIs of files to be + placed in the working directory of each + executor. + archive_uris (MutableSequence[str]): + Optional. Cloud Storage URIs of archives to + be extracted into the working directory of each + executor. Supported file types: .jar, .tar, + .tar.gz, .tgz, and .zip.
+ infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): + Optional. Infrastructure specification for + the execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=100, + oneof='driver', + ) + main_class: str = proto.Field( + proto.STRING, + number=101, + oneof='driver', + ) + python_script_file: str = proto.Field( + proto.STRING, + number=102, + oneof='driver', + ) + sql_script_file: str = proto.Field( + proto.STRING, + number=104, + oneof='driver', + ) + sql_script: str = proto.Field( + proto.STRING, + number=105, + oneof='driver', + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( + proto.MESSAGE, + number=6, + message='Task.InfrastructureSpec', + ) + + class NotebookTaskConfig(proto.Message): + r"""Config for running scheduled notebooks. + + Attributes: + notebook (str): + Required. Path to input notebook. This can be the Cloud + Storage URI of the notebook file or the path to a Notebook + Content. The execution args are accessible as environment + variables (``TASK_key=value``). + infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): + Optional. Infrastructure specification for + the execution. + file_uris (MutableSequence[str]): + Optional. Cloud Storage URIs of files to be + placed in the working directory of each + executor. + archive_uris (MutableSequence[str]): + Optional. Cloud Storage URIs of archives to + be extracted into the working directory of each + executor. Supported file types: .jar, .tar, + .tar.gz, .tgz, and .zip. + """ + + notebook: str = proto.Field( + proto.STRING, + number=4, + ) + infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( + proto.MESSAGE, + number=3, + message='Task.InfrastructureSpec', + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + class ExecutionStatus(proto.Message): + r"""Status of the task execution (e.g. Jobs). + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of the status. + latest_job (google.cloud.dataplex_v1.types.Job): + Output only. 
Latest job execution. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + latest_job: 'Job' = proto.Field( + proto.MESSAGE, + number=9, + message='Job', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + state: resources.State = proto.Field( + proto.ENUM, + number=7, + enum=resources.State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + trigger_spec: TriggerSpec = proto.Field( + proto.MESSAGE, + number=100, + message=TriggerSpec, + ) + execution_spec: ExecutionSpec = proto.Field( + proto.MESSAGE, + number=101, + message=ExecutionSpec, + ) + execution_status: ExecutionStatus = proto.Field( + proto.MESSAGE, + number=201, + message=ExecutionStatus, + ) + spark: SparkTaskConfig = proto.Field( + proto.MESSAGE, + number=300, + oneof='config', + message=SparkTaskConfig, + ) + notebook: NotebookTaskConfig = proto.Field( + proto.MESSAGE, + number=302, + oneof='config', + message=NotebookTaskConfig, + ) + + +class Job(proto.Message): + r"""A job represents an instance of a task. + + Attributes: + name (str): + Output only. The relative resource name of the job, of the + form: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. + uid (str): + Output only. System generated globally unique + ID for the job. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the job was + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the job ended. + state (google.cloud.dataplex_v1.types.Job.State): + Output only. Execution state for the job. + retry_count (int): + Output only. The number of times the job has + been retried (excluding the initial attempt). + service (google.cloud.dataplex_v1.types.Job.Service): + Output only. The underlying service running a + job. + service_job (str): + Output only. The full resource name for the + job run under a particular service. + message (str): + Output only. Additional information about the + current state. + labels (MutableMapping[str, str]): + Output only. User-defined labels for the + task. + trigger (google.cloud.dataplex_v1.types.Job.Trigger): + Output only. Job execution trigger. + execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): + Output only. Spec related to how a task is + executed. + """ + class Service(proto.Enum): + r""" + + Values: + SERVICE_UNSPECIFIED (0): + Service used to run the job is unspecified. + DATAPROC (1): + Dataproc service is used to run this job. + """ + SERVICE_UNSPECIFIED = 0 + DATAPROC = 1 + + class State(proto.Enum): + r""" + + Values: + STATE_UNSPECIFIED (0): + The job state is unknown. + RUNNING (1): + The job is running. + CANCELLING (2): + The job is cancelling. + CANCELLED (3): + The job cancellation was successful. + SUCCEEDED (4): + The job completed successfully. + FAILED (5): + The job is no longer running due to an error. + ABORTED (6): + The job was cancelled outside of Dataplex + Universal Catalog.
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + CANCELLING = 2 + CANCELLED = 3 + SUCCEEDED = 4 + FAILED = 5 + ABORTED = 6 + + class Trigger(proto.Enum): + r"""Job execution trigger. + + Values: + TRIGGER_UNSPECIFIED (0): + The trigger is unspecified. + TASK_CONFIG (1): + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. + RUN_REQUEST (2): + The job was triggered by the explicit call of + Task API. + """ + TRIGGER_UNSPECIFIED = 0 + TASK_CONFIG = 1 + RUN_REQUEST = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + retry_count: int = proto.Field( + proto.UINT32, + number=6, + ) + service: Service = proto.Field( + proto.ENUM, + number=7, + enum=Service, + ) + service_job: str = proto.Field( + proto.STRING, + number=8, + ) + message: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + trigger: Trigger = proto.Field( + proto.ENUM, + number=11, + enum=Trigger, + ) + execution_spec: 'Task.ExecutionSpec' = proto.Field( + proto.MESSAGE, + number=100, + message='Task.ExecutionSpec', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py new file mode 100644 index 000000000000..321a359c4f6b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-dataplex" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
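+ # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION tells the protobuf runtime which + # backend to load at import time ("python", "upb", or "cpp"), so each + # parametrized run of this session exercises the implementation under test.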
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
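+ # Each entry below is a PEP 508 direct reference; for packages that live in + # a monorepo, the `subdirectory=` URL fragment points pip at the package's + # folder within the repository.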
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py new file mode 100644 index 000000000000..c87894b42634 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py new file mode 100644 index 000000000000..02835539a9b5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = await client.create_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py new file mode 100644 index 000000000000..2a3ed96a2236 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = client.create_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py new file mode 100644 index 000000000000..fcbc1d2bdeae --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py new file mode 100644 index 000000000000..08851757a5be --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = await client.create_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py new file mode 100644 index 000000000000..9987138e7c68 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = client.create_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py new file mode 100644 index 000000000000..770ba6a5e13f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py new file mode 100644 index 000000000000..4e63aea64920 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_category(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py new file mode 100644 index 000000000000..129f5ee68142 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_category(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py new file mode 100644 index 000000000000..ce878bcc195e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py new file mode 100644 index 000000000000..bea27d33034d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
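+#   Note on the DeleteGlossary samples above: operation.result() blocks
+#   until the long-running operation finishes; it also accepts an optional
+#   timeout in seconds, e.g. (illustrative value):
+#     response = operation.result(timeout=300)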
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_term(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py new file mode 100644 index 000000000000..87c4ba532889 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_term(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py new file mode 100644 index 000000000000..b46e94009514 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py new file mode 100644 index 000000000000..7524e741a51d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py new file mode 100644 index 000000000000..43e3d9ec99a5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py new file mode 100644 index 000000000000..943a5534d01a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py new file mode 100644 index 000000000000..baf885229bbc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
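+#   Instead of hand-assembling the resource name, the generated path
+#   helper can be used; a sketch (the helper name and argument order are
+#   assumed from standard generator conventions):
+#     name = dataplex_v1.BusinessGlossaryServiceClient.glossary_term_path(
+#         "my-project", "us-central1", "my-glossary", "my-term")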
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py new file mode 100644 index 000000000000..3b2368612e89 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py new file mode 100644 index 000000000000..d9dbd0f0767c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_list_glossaries():
+    # Create a client
+    client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListGlossariesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_glossaries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py
new file mode 100644
index 000000000000..1b2469dcf0b9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListGlossaries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py new file mode 100644 index 000000000000..14d77cb5f9a3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryCategories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
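+#   The pagers returned by the List* methods fetch further pages
+#   transparently as you iterate; a page size can also be suggested on the
+#   request, e.g. (assuming the standard AIP-158 page_size field):
+#     request = dataplex_v1.ListGlossaryCategoriesRequest(
+#         parent="parent_value",
+#         page_size=50,
+#     )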
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_list_glossary_categories():
+    # Create a client
+    client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListGlossaryCategoriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_glossary_categories(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py
new file mode 100644
index 000000000000..357b89a4a77c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListGlossaryCategories
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py new file mode 100644 index 000000000000..d3a9df2e788d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryTerms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_list_glossary_terms():
+    # Create a client
+    client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListGlossaryTermsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_glossary_terms(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py
new file mode 100644
index 000000000000..7f3d1a00bc6c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListGlossaryTerms
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py new file mode 100644 index 000000000000..21cc26ce8a41 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
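+#   Update requests normally carry a FieldMask naming the fields to
+#   overwrite; a sketch (the "labels" path is illustrative only):
+#     from google.protobuf import field_mask_pb2
+#     request = dataplex_v1.UpdateGlossaryRequest(
+#         glossary=dataplex_v1.Glossary(),
+#         update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+#     )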
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1


+async def sample_update_glossary():
+    # Create a client
+    client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateGlossaryRequest(
+    )
+
+    # Make the request
+    operation = await client.update_glossary(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py
new file mode 100644
index 000000000000..bc9f3931ebeb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateGlossaryCategory
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = await client.update_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py new file mode 100644 index 000000000000..27c16af104b4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = client.update_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py new file mode 100644 index 000000000000..9e5cda83589d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py new file mode 100644 index 000000000000..f1c0183067c4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
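+#   In practice the term passed to UpdateGlossaryTerm carries the new
+#   values to write, e.g. (field name assumed; GlossaryTerm fields are not
+#   shown in this change):
+#     term.display_name = "Customer"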
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = await client.update_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py new file mode 100644 index 000000000000..f162ab6e6cd5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = client.update_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py new file mode 100644 index 000000000000..672f6636c29d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py new file mode 100644 index 000000000000..992adf2c9126 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py new file mode 100644 index 000000000000..fcdb536becdd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
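+# - It starts a long-running operation and waits for its result before
+#   printing the created AspectType, so it may run for some time.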
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py new file mode 100644 index 000000000000..ebd0cd697960 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
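+# - It blocks on operation.result(), which waits for the long-running
+#   operation to complete and also accepts an optional timeout argument.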
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.CreateAspectTypeRequest( + parent="parent_value", + aspect_type_id="aspect_type_id_value", + aspect_type=aspect_type, + ) + + # Make the request + operation = client.create_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py new file mode 100644 index 000000000000..307179eca305 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py new file mode 100644 index 000000000000..10156ffe9405 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py new file mode 100644 index 000000000000..e6c5c4b0c6e0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + operation = client.create_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py new file mode 100644 index 000000000000..11fdd293d63e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
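+# - It may require replacing the entry_references placeholders with the
+#   resource names of existing entries that the link should connect.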
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = await client.create_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryLink_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py new file mode 100644 index 000000000000..545fc9510ac8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = client.create_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryLink_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py new file mode 100644 index 000000000000..9d08ecbcc807 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py new file mode 100644 index 000000000000..c8c93d0810d9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py new file mode 100644 index 000000000000..1f07e18303ec --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEntryTypeRequest( + parent="parent_value", + entry_type_id="entry_type_id_value", + ) + + # Make the request + operation = client.create_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py new file mode 100644 index 000000000000..176dc8f6b21a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
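+# - It may require reconciling the placeholder values: the request below
+#   sets an import_spec while type_ is "EXPORT"; a real job needs a spec
+#   that matches its type.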
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" + metadata_job.type_ = "EXPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py new file mode 100644 index 000000000000..fffa3dca81b3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" + metadata_job.type_ = "EXPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py new file mode 100644 index 000000000000..977d70c886fe --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
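+# - It waits on a long-running operation whose result for delete calls is
+#   an empty message, so the printed response carries no fields.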
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py new file mode 100644 index 000000000000..7bb600cd72a3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAspectTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py new file mode 100644 index 000000000000..7f75da02f2a5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
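+# - It may require replacing "name_value" with the entry's full resource
+#   name, e.g.
+#   projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}.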
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py new file mode 100644 index 000000000000..0efb92609a71 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py new file mode 100644 index 000000000000..cf187e0a1e01 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py new file mode 100644 index 000000000000..5f6e273420d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
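+# - It may require replacing "name_value" with the entry link's full
+#   resource name (an entryLinks/* resource under an entry group).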
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryLink_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py new file mode 100644 index 000000000000..9fe8fd9e84e0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryLink_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py new file mode 100644 index 000000000000..827f2dfb7f63 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py new file mode 100644 index 000000000000..8a877e0d7180 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py new file mode 100644 index 000000000000..eaed87a6d471 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryTypeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py new file mode 100644 index 000000000000..3c98341d85fd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
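+# - It may require enabling the Dataplex API on the project before the
+#   request will succeed:
+#   https://console.cloud.google.com/apis/library/dataplex.googleapis.com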
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_aspect_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py new file mode 100644 index 000000000000..238e178abf2e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAspectTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_aspect_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py new file mode 100644 index 000000000000..ec327f90378e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py new file mode 100644 index 000000000000..6c047da8ee7a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py new file mode 100644 index 000000000000..6d552d21d845 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py new file mode 100644 index 000000000000..c60929bae482 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryLink_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py new file mode 100644 index 000000000000..5c090b4900f2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryLink_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py new file mode 100644 index 000000000000..bd0bf1f995b0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py new file mode 100644 index 000000000000..4a57ddcf1cf3 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py new file mode 100644 index 000000000000..27060439c400 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_type(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py new file mode 100644 index 000000000000..d2da4af3ae6c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py new file mode 100644 index 000000000000..40fcab33f820 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py new file mode 100644 index 000000000000..8b9ae3b16b14 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAspectTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListAspectTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_aspect_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListAspectTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_aspect_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListAspectTypes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
new file mode 100644
index 000000000000..7f9ca88942bd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAspectTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListAspectTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_aspect_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListAspectTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_aspect_types(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListAspectTypes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py
new file mode 100644
index 000000000000..7562629a5995
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntries_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entries():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
new file mode 100644
index 000000000000..c7a9e8bceb7c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_entries():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_entries(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntries_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py
new file mode 100644
index 000000000000..e414df47c3a4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryGroups_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entry_groups():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entry_groups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryGroups_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
new file mode 100644
index 000000000000..eb480ab9c3d1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryGroups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_entry_groups():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_entry_groups(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryGroups_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py
new file mode 100644
index 000000000000..e6bcb19bc98e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryTypes_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_entry_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entry_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryTypes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py
new file mode 100644
index 000000000000..9cc615ff9dd5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListEntryTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_entry_types():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEntryTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_entry_types(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListEntryTypes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py
new file mode 100644
index 000000000000..9a08c5162a08
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMetadataJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_metadata_jobs():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListMetadataJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_metadata_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py
new file mode 100644
index 000000000000..36fd4a81d24c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMetadataJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_metadata_jobs():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListMetadataJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_metadata_jobs(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py
new file mode 100644
index 000000000000..9aacffaf9a30
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for LookupEntry
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_LookupEntry_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_lookup_entry():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.LookupEntryRequest(
+        name="name_value",
+        entry="entry_value",
+    )
+
+    # Make the request
+    response = await client.lookup_entry(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_LookupEntry_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py
new file mode 100644
index 000000000000..f79f5f04126e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for LookupEntry
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_LookupEntry_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_lookup_entry():
+    # Create a client
+    client = dataplex_v1.CatalogServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.LookupEntryRequest(
+        name="name_value",
+        entry="entry_value",
+    )
+
+    # Make the request
+    response = client.lookup_entry(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_CatalogService_LookupEntry_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py
new file mode 100644
index 000000000000..ba04f5e078a4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_SearchEntries_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_search_entries():
+    # Create a client
+    client = dataplex_v1.CatalogServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.SearchEntriesRequest(
+        name="name_value",
+        query="query_value",
+    )
+
+    # Make the request
+    page_result = await client.search_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_CatalogService_SearchEntries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py
new file mode 100644
index 000000000000..04b5643e21c6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_CatalogService_SearchEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_search_entries(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.SearchEntriesRequest( + name="name_value", + query="query_value", + ) + + # Make the request + page_result = client.search_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_SearchEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py new file mode 100644 index 000000000000..fdaad89c0129 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateAspectType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = await client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py new file mode 100644 index 000000000000..79049dd35006 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAspectType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateAspectType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
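+#   To restrict which fields are written, the update can carry a field mask
+#   (the `update_mask` request field follows the usual AIP-134 shape and is
+#   assumed here):
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = dataplex_v1.UpdateAspectTypeRequest(
+#           aspect_type=aspect_type,
+#           update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+#       )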
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_aspect_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + aspect_type = dataplex_v1.AspectType() + aspect_type.metadata_template.name = "name_value" + aspect_type.metadata_template.type_ = "type__value" + + request = dataplex_v1.UpdateAspectTypeRequest( + aspect_type=aspect_type, + ) + + # Make the request + operation = client.update_aspect_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py new file mode 100644 index 000000000000..b1acbea5b5db --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
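+#   A per-call deadline can be set through the standard GAPIC `timeout`
+#   keyword (value in seconds, illustrative):
+#
+#       response = await client.update_entry(request=request, timeout=60.0)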
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py new file mode 100644 index 000000000000..c084f241abb9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = await client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py new file mode 100644 index 000000000000..82aa81ae58fd --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
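+#   Because this RPC returns a long-running operation, the wait can be
+#   bounded by passing a timeout to `result()` (value in seconds,
+#   illustrative):
+#
+#       response = operation.result(timeout=300)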
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry_group(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryGroupRequest( + ) + + # Make the request + operation = client.update_entry_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py new file mode 100644 index 000000000000..4d38a5ac7833 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry = dataplex_v1.Entry() + entry.entry_type = "entry_type_value" + + request = dataplex_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py new file mode 100644 index 000000000000..f19a55e6b0e6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
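+#   The request below is left empty for brevity; a realistic update attaches
+#   the resource being changed (and, it is assumed, a field mask), e.g.:
+#
+#       entry_type = dataplex_v1.EntryType()
+#       entry_type.display_name = "display_name_value"
+#
+#       request = dataplex_v1.UpdateEntryTypeRequest(entry_type=entry_type)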
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = await client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py new file mode 100644 index 000000000000..47bd13e333ae --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_UpdateEntryType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entry_type(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEntryTypeRequest( + ) + + # Make the request + operation = client.update_entry_type(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_UpdateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py new file mode 100644 index 000000000000..4cc2c823341a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_CreateEncryptionConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEncryptionConfigRequest( + parent="parent_value", + encryption_config_id="encryption_config_id_value", + ) + + # Make the request + operation = await client.create_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_CreateEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py new file mode 100644 index 000000000000..c170b0a8d37c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateEncryptionConfigRequest( + parent="parent_value", + encryption_config_id="encryption_config_id_value", + ) + + # Make the request + operation = client.create_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py new file mode 100644 index 000000000000..bc483ea781a9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + operation = await client.delete_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py new file mode 100644 index 000000000000..b79392c229ea --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py new file mode 100644 index 000000000000..6c00b9ee027e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_GetEncryptionConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
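+#   The `name` value follows the EncryptionConfig resource pattern; the
+#   pattern shown here is an assumption for illustration:
+#
+#       name = "organizations/my-org/locations/us-central1/encryptionConfigs/my-config"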
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_encryption_config(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_GetEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py new file mode 100644 index 000000000000..837fdfb77220 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_GetEncryptionConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEncryptionConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_encryption_config(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_GetEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py new file mode 100644 index 000000000000..0af63d57396e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEncryptionConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_ListEncryptionConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
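+#   Paging can be shaped with the standard AIP-158 list parameters; the
+#   `page_size` field is assumed here (value illustrative):
+#
+#       request = dataplex_v1.ListEncryptionConfigsRequest(
+#           parent="parent_value",
+#           page_size=50,
+#       )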
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_encryption_configs(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEncryptionConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_encryption_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_CmekService_ListEncryptionConfigs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py new file mode 100644 index 000000000000..6721d3906f0c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEncryptionConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_encryption_configs(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEncryptionConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_encryption_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py new file mode 100644 index 000000000000..bcbaaaa68e3d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEncryptionConfigRequest( + ) + + # Make the request + operation = await client.update_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py new file mode 100644 index 000000000000..e35470d3489c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEncryptionConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_encryption_config(): + # Create a client + client = dataplex_v1.CmekServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateEncryptionConfigRequest( + ) + + # Make the request + operation = client.update_encryption_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py new file mode 100644 index 000000000000..ee8bca70432f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_CreateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
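+#   Content carries exactly one payload variant; besides the sql_script used
+#   below, a notebook payload is possible (the kernel enum value shown is an
+#   assumption):
+#
+#       content = dataplex_v1.Content()
+#       content.data_text = "data_text_value"
+#       content.notebook.kernel_type = "PYTHON3"
+#       content.path = "path_value"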
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = await client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_CreateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py new file mode 100644 index 000000000000..51ebad7acbc6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_CreateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_CreateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py new file mode 100644 index 000000000000..cb2b560d1ddf --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_DeleteContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_content(request=request) + + +# [END dataplex_v1_generated_ContentService_DeleteContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py new file mode 100644 index 000000000000..00bc415820e8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_DeleteContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + +# [END dataplex_v1_generated_ContentService_DeleteContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py new file mode 100644 index 000000000000..8f536a550208 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py new file mode 100644 index 000000000000..99d63ea78c5a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py new file mode 100644 index 000000000000..b2684491d612 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
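Note: the snippets pass placeholder strings such as `"name_value"`, but `name` must be a full Dataplex content resource name. Generated clients conventionally expose a `*_path` helper for this; the helper name, its parameters, and the segment values below are assumptions for illustration, not taken from this diff:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.ContentServiceClient()

# Assumed helper; builds a name like
# "projects/my-project/locations/us-central1/lakes/my-lake/contentitems/my-content".
name = client.content_path(
    project="my-project",
    location="us-central1",
    lake="my-lake",
    content="my-content",
)
response = client.get_content(request=dataplex_v1.GetContentRequest(name=name))
print(response)
```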
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py new file mode 100644 index 000000000000..e1c27f30c89c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py new file mode 100644 index 000000000000..fee9a49cd105 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_ListContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
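Note: the header comments repeatedly point to the `client_options` documentation for regional endpoints. A sketch of what that looks like; the endpoint string is hypothetical, so substitute one the service actually publishes:

```python
from google.api_core.client_options import ClientOptions
from google.cloud import dataplex_v1

# "us-central1-dataplex.googleapis.com" is an illustrative endpoint only;
# check the Dataplex documentation for the endpoints that really exist.
client = dataplex_v1.ContentServiceClient(
    client_options=ClientOptions(api_endpoint="us-central1-dataplex.googleapis.com")
)
```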
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_content():
+    # Create a client
+    client = dataplex_v1.ContentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListContentRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_content(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_ContentService_ListContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
new file mode 100644
index 000000000000..45cfe1764d91
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListContent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_ContentService_ListContent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_ContentService_ListContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py new file mode 100644 index 000000000000..23071cd66da7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
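Note: both ListContent snippets drain the pager item by item; the pager issues further RPCs lazily as each page is exhausted. A sketch of page-level iteration with an explicit page size, using the sync client:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.ContentServiceClient()
request = dataplex_v1.ListContentRequest(parent="parent_value", page_size=50)

pager = client.list_content(request=request)

# .pages yields one ListContentResponse per underlying RPC, which makes the
# per-page results (and the paging cursor) visible to the caller.
for page in pager.pages:
    for content in page.content:
        print(content.name)
```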
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py new file mode 100644 index 000000000000..84a85d8c891e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py new file mode 100644 index 000000000000..b5b6956a100c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
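Note: SetIamPolicy replaces the entire policy, so the usual pattern is read-modify-write rather than the bare write shown in the snippets. A sketch with an illustrative role and member:

```python
from google.cloud import dataplex_v1
from google.iam.v1 import iam_policy_pb2, policy_pb2

client = dataplex_v1.ContentServiceClient()
resource = "resource_value"  # placeholder, as in the snippets above

# Read the current policy, append a binding, then write the policy back.
# Reusing the policy returned by GetIamPolicy carries its etag forward, so
# the service can reject a write that races with a concurrent modification.
policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
)
policy.bindings.append(
    policy_pb2.Binding(role="roles/viewer", members=["user:alice@example.com"])
)
client.set_iam_policy(
    request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
)
```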
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py new file mode 100644 index 000000000000..88e6c0a30818 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py new file mode 100644 index 000000000000..294c1dc90689 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_UpdateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
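Note: TestIamPermissions echoes back only the subset of requested permissions the caller actually holds, so the useful check is the difference between the requested and returned sets. The permission strings below are illustrative, not verified against the published Dataplex permission list:

```python
from google.cloud import dataplex_v1
from google.iam.v1 import iam_policy_pb2

client = dataplex_v1.ContentServiceClient()

requested = ["dataplex.content.get", "dataplex.content.delete"]  # illustrative names
response = client.test_iam_permissions(
    request=iam_policy_pb2.TestIamPermissionsRequest(
        resource="resource_value",
        permissions=requested,
    )
)

# Anything requested but not returned is a permission the caller lacks.
missing = sorted(set(requested) - set(response.permissions))
if missing:
    print(f"caller lacks: {missing}")
```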
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = await client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_UpdateContent_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py new file mode 100644 index 000000000000..3bd002a75b1c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_ContentService_UpdateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_ContentService_UpdateContent_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py new file mode 100644 index 000000000000..6d7e850f7d00 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_CreateDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
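Note: the UpdateContent snippets send a whole `Content` message. The request also carries an `update_mask` limiting which fields the service writes, which is how partial updates avoid clobbering the rest of the resource. A sketch updating only the script body; the field path follows the proto field name:

```python
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.ContentServiceClient()

content = dataplex_v1.Content()
content.name = "name_value"  # the existing content item to update
content.data_text = "updated body"

request = dataplex_v1.UpdateContentRequest(
    content=content,
    # Only fields named in the mask are written; all others keep their values.
    update_mask=field_mask_pb2.FieldMask(paths=["data_text"]),
)
response = client.update_content(request=request)
print(response)
```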
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.CreateDataScanRequest(
+        parent="parent_value",
+        data_scan=data_scan,
+        data_scan_id="data_scan_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_CreateDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
new file mode 100644
index 000000000000..8991dae05723
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_CreateDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.CreateDataScanRequest(
+        parent="parent_value",
+        data_scan=data_scan,
+        data_scan_id="data_scan_id_value",
+    )
+
+    # Make the request
+    operation = client.create_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_CreateDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
new file mode 100644
index 000000000000..34ba6263ff4b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_DeleteDataScan_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
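Note: the CreateDataScan snippets populate a single rule carrying only a dimension, which the service would reject. A slightly fuller sketch of a data-quality rule using a NON_NULL expectation; the column name, dimension, and threshold are examples only:

```python
from google.cloud import dataplex_v1

# One completeness rule: "order_id" must be non-NULL in at least 95% of rows.
# Column, dimension string, and threshold are illustrative values.
rule = dataplex_v1.DataQualityRule(
    column="order_id",
    dimension="COMPLETENESS",
    threshold=0.95,
    non_null_expectation=dataplex_v1.DataQualityRule.NonNullExpectation(),
)

data_scan = dataplex_v1.DataScan()
data_scan.data_quality_spec.rules = [rule]
data_scan.data.entity = "entity_value"
```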
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataScanRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_DeleteDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
new file mode 100644
index 000000000000..da530afdeae4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_DeleteDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteDataScanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_scan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_DeleteDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py new file mode 100644 index 000000000000..12a8addea03a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateDataQualityRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
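Note: the DeleteDataScan snippets block in `operation.result()` with no bound. A sketch that caps the wait and surfaces failures; the 300-second timeout is arbitrary:

```python
from google.api_core.exceptions import GoogleAPICallError
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
operation = client.delete_data_scan(
    request=dataplex_v1.DeleteDataScanRequest(name="name_value")
)

try:
    # result() polls the long-running operation; timeout is in seconds and
    # raises concurrent.futures.TimeoutError if exceeded.
    operation.result(timeout=300)
    print("scan deleted")
except GoogleAPICallError as exc:
    print(f"delete failed: {exc}")
```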
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py new file mode 100644 index 000000000000..6baafa1eaa02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateDataQualityRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_generate_data_quality_rules(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GenerateDataQualityRulesRequest( + name="name_value", + ) + + # Make the request + response = client.generate_data_quality_rules(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py new file mode 100644 index 000000000000..b5c832bc848a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
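Note: GenerateDataQualityRules only recommends rules; a common follow-up is to feed them into a scan's `data_quality_spec`. The repeated `rule` field name on the response is an assumption from the v1 proto, so verify it against the published message:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
response = client.generate_data_quality_rules(
    request=dataplex_v1.GenerateDataQualityRulesRequest(name="name_value")
)

# Reuse the recommended rules as the spec of a new data-quality scan.
# `response.rule` is the assumed repeated field of DataQualityRule messages.
data_scan = dataplex_v1.DataScan()
data_scan.data_quality_spec.rules = list(response.rule)
print(f"{len(data_scan.data_quality_spec.rules)} rules recommended")
```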
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py new file mode 100644 index 000000000000..80598e607389 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScanJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScanJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_scan_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScanJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py new file mode 100644 index 000000000000..ddac11dcc9f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScanJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScanJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_scan_job(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScanJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py new file mode 100644 index 000000000000..be7a3b9be4c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataScan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_GetDataScan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_data_scan(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetDataScanRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_scan(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataScanService_GetDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py new file mode 100644 index 000000000000..c61c2370d8d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataScanJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
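Note: GetDataScan returns a basic view by default; the request accepts a `view` enum that also pulls the full spec and execution details. The enum location below is assumed from the public API surface:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
request = dataplex_v1.GetDataScanRequest(
    name="name_value",
    # FULL includes spec and result details omitted from the default view
    # (assumed enum; confirm against the GetDataScanRequest message).
    view=dataplex_v1.GetDataScanRequest.DataScanView.FULL,
)
response = client.get_data_scan(request=request)
print(response)
```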
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_scan_jobs():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataScanJobsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_scan_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py
new file mode 100644
index 000000000000..f2a0b4655a4f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataScanJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_data_scan_jobs(): + # Create a client + client = dataplex_v1.DataScanServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListDataScanJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_scan_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py new file mode 100644 index 000000000000..48c610c9b06c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataScans +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataScanService_ListDataScans_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
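Note: a small client-side filter over the ListDataScanJobs pager, for example to surface jobs that did not succeed. `SUCCEEDED` is taken from the published `DataScanJob.State` enum:

```python
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
pager = client.list_data_scan_jobs(
    request=dataplex_v1.ListDataScanJobsRequest(parent="parent_value")
)

# Report every job whose terminal state is anything other than SUCCEEDED.
for job in pager:
    if job.state != dataplex_v1.DataScanJob.State.SUCCEEDED:
        print(job.name, job.state.name)
```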
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_scans():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataScansRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_scans(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataScanService_ListDataScans_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py
new file mode 100644
index 000000000000..92571d491f45
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataScans
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_ListDataScans_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_data_scans():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataScansRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_data_scans(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataScanService_ListDataScans_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py
new file mode 100644
index 000000000000..2fa52865fd65
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RunDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_RunDataScan_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_run_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.RunDataScanRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.run_data_scan(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_RunDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py
new file mode 100644
index 000000000000..33d96e7d6238
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RunDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_RunDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_run_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.RunDataScanRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.run_data_scan(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_RunDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py
new file mode 100644
index 000000000000..ffcae850e8d9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_UpdateDataScan_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
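+# - UpdateDataScan returns a long-running operation. With the async client,
+#   both the call and its result need to be awaited; a minimal sketch (not
+#   generator output) of the pattern used below:
+#
+#       operation = await client.update_data_scan(request=request)
+#       response = await operation.result()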
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.UpdateDataScanRequest(
+        data_scan=data_scan,
+    )
+
+    # Make the request
+    operation = await client.update_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_UpdateDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py
new file mode 100644
index 000000000000..c93c7c383986
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataScan
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataScanService_UpdateDataScan_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
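+# - `DataScan.data_quality_spec.rules` is a repeated field, so it is
+#   populated with a list of rules rather than by attribute assignment; a
+#   sketch (not generator output; the field values are placeholders):
+#
+#       rule = dataplex_v1.DataQualityRule(dimension="dimension_value")
+#       data_scan.data_quality_spec.rules = [rule]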
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_update_data_scan():
+    # Create a client
+    client = dataplex_v1.DataScanServiceClient()
+
+    # Initialize request argument(s)
+    data_scan = dataplex_v1.DataScan()
+    data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")]
+    data_scan.data.entity = "entity_value"
+
+    request = dataplex_v1.UpdateDataScanRequest(
+        data_scan=data_scan,
+    )
+
+    # Make the request
+    operation = client.update_data_scan(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataScanService_UpdateDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py
new file mode 100644
index 000000000000..e2b2bf71b387
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataAttributeRequest(
+        parent="parent_value",
+        data_attribute_id="data_attribute_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py
new file mode 100644
index 000000000000..beeb5680d9e5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.CreateDataAttributeBindingRequest(
+        parent="parent_value",
+        data_attribute_binding_id="data_attribute_binding_id_value",
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = await client.create_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..a3e3aa93857e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.CreateDataAttributeBindingRequest(
+        parent="parent_value",
+        data_attribute_binding_id="data_attribute_binding_id_value",
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = client.create_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py
new file mode 100644
index 000000000000..d6152220a602
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataAttributeRequest(
+        parent="parent_value",
+        data_attribute_id="data_attribute_id_value",
+    )
+
+    # Make the request
+    operation = client.create_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py
new file mode 100644
index 000000000000..99ce40ce34a9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataTaxonomyRequest(
+        parent="parent_value",
+        data_taxonomy_id="data_taxonomy_id_value",
+    )
+
+    # Make the request
+    operation = await client.create_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py
new file mode 100644
index 000000000000..772a778084ad
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateDataTaxonomyRequest(
+        parent="parent_value",
+        data_taxonomy_id="data_taxonomy_id_value",
+    )
+
+    # Make the request
+    operation = client.create_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py
new file mode 100644
index 000000000000..0e64772d9c22
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py
new file mode 100644
index 000000000000..c477330499b8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
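+# - DeleteDataAttributeBinding expects the binding's current etag; a hedged
+#   sketch for fetching it first (not generator output; "name_value" is a
+#   placeholder):
+#
+#       binding = await client.get_data_attribute_binding(name="name_value")
+#       request = dataplex_v1.DeleteDataAttributeBindingRequest(
+#           name="name_value",
+#           etag=binding.etag,
+#       )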
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeBindingRequest(
+        name="name_value",
+        etag="etag_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..e364102af74f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_delete_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeBindingRequest(
+        name="name_value",
+        etag="etag_value",
+    )
+
+    # Make the request
+    operation = client.delete_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py
new file mode 100644
index 000000000000..0c151d0cc561
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_delete_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataAttributeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py
new file mode 100644
index 000000000000..b436ccabc9fd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataTaxonomyRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py
new file mode 100644
index 000000000000..bb970218bb82
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_delete_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteDataTaxonomyRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py
new file mode 100644
index 000000000000..1ef7d2f933b4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_get_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataAttributeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_data_attribute(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py
new file mode 100644
index 000000000000..3f4a9258be18
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_get_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataAttributeBindingRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_data_attribute_binding(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..1726b15817dc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_get_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataAttributeBindingRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_data_attribute_binding(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
new file mode 100644
index 000000000000..c772287eecea
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_get_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataAttributeRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_data_attribute(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
new file mode 100644
index 000000000000..7001118c3814
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
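+# - "name_value" stands for a full resource name. The generated clients
+#   expose path helpers that can build it; a sketch (not generator output;
+#   the project, location, and ID are placeholders):
+#
+#       name = dataplex_v1.DataTaxonomyServiceClient.data_taxonomy_path(
+#           "my-project", "us-central1", "my-taxonomy",
+#       )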
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_get_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataTaxonomyRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_data_taxonomy(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
new file mode 100644
index 000000000000..9171ee6b5c2c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_get_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetDataTaxonomyRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_data_taxonomy(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
new file mode 100644
index 000000000000..ddc48325c378
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributeBindings
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_attribute_bindings():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributeBindingsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_attribute_bindings(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
new file mode 100644
index 000000000000..a37350712c72
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributeBindings
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_data_attribute_bindings():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributeBindingsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_data_attribute_bindings(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
new file mode 100644
index 000000000000..884af22fadca
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
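+#   For example, "parent_value" in the request below typically stands for the
+#   parent taxonomy, e.g.
+#   "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}"
+#   (an assumed pattern; replace the placeholders with real IDs).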
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_attributes():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_attributes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
new file mode 100644
index 000000000000..ad286e0bccc0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataAttributes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_data_attributes():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataAttributesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_data_attributes(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
new file mode 100644
index 000000000000..a8e44196d0fe
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataTaxonomies
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
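+#   For example, "parent_value" in the request below typically stands for a
+#   location name such as "projects/{project}/locations/{location}" (an assumed
+#   pattern; taxonomies are listed per project and location).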
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_list_data_taxonomies():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataTaxonomiesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_data_taxonomies(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
new file mode 100644
index 000000000000..3199469fe760
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDataTaxonomies
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_list_data_taxonomies():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListDataTaxonomiesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_data_taxonomies(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
new file mode 100644
index 000000000000..5538cdb101c5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
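+#   A real update request typically also sets the DataAttribute to update and
+#   an update_mask (google.protobuf.FieldMask) naming the fields to change;
+#   the empty request below is only a generated placeholder.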
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataAttributeRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
new file mode 100644
index 000000000000..003d919da568
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
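+#   Beyond the required "resource" field shown below, a real update typically
+#   also supplies an update_mask (google.protobuf.FieldMask) restricting which
+#   fields of the binding are changed.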
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.UpdateDataAttributeBindingRequest(
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = client.update_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
new file mode 100644
index 000000000000..bcde182293ec
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttributeBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_update_data_attribute_binding():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    data_attribute_binding = dataplex_v1.DataAttributeBinding()
+    data_attribute_binding.resource = "resource_value"
+
+    request = dataplex_v1.UpdateDataAttributeBindingRequest(
+        data_attribute_binding=data_attribute_binding,
+    )
+
+    # Make the request
+    operation = client.update_data_attribute_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
new file mode 100644
index 000000000000..72938b7a0084
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataAttribute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_update_data_attribute():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataAttributeRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_attribute(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
new file mode 100644
index 000000000000..d27e74f03f83
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
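+#   As with the other update RPCs, a real call typically sets the DataTaxonomy
+#   to update plus an update_mask (google.protobuf.FieldMask); the empty
+#   request below is only a generated placeholder.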
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataTaxonomyRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
new file mode 100644
index 000000000000..e2165abe9e32
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDataTaxonomy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_update_data_taxonomy():
+    # Create a client
+    client = dataplex_v1.DataTaxonomyServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateDataTaxonomyRequest(
+    )
+
+    # Make the request
+    operation = client.update_data_taxonomy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
new file mode 100644
index 000000000000..e0092b6956d1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CancelJob
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CancelJob_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
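+#   For example, "name_value" in the request below typically stands for a job
+#   name such as
+#   "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}"
+#   (an assumed pattern; replace the placeholders with real IDs).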
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_cancel_job():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CancelJobRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    await client.cancel_job(request=request)
+
+
+# [END dataplex_v1_generated_DataplexService_CancelJob_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
new file mode 100644
index 000000000000..402f8a637848
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CancelJob
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CancelJob_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_cancel_job():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CancelJobRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    client.cancel_job(request=request)
+
+
+# [END dataplex_v1_generated_DataplexService_CancelJob_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
new file mode 100644
index 000000000000..009395cfc039
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateAsset_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_asset():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    asset = dataplex_v1.Asset()
+    asset.resource_spec.type_ = "BIGQUERY_DATASET"
+
+    request = dataplex_v1.CreateAssetRequest(
+        parent="parent_value",
+        asset_id="asset_id_value",
+        asset=asset,
+    )
+
+    # Make the request
+    operation = client.create_asset(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
new file mode 100644
index 000000000000..4c973edec59f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateAsset
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateAsset_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_asset():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    asset = dataplex_v1.Asset()
+    asset.resource_spec.type_ = "BIGQUERY_DATASET"
+
+    request = dataplex_v1.CreateAssetRequest(
+        parent="parent_value",
+        asset_id="asset_id_value",
+        asset=asset,
+    )
+
+    # Make the request
+    operation = client.create_asset(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateAsset_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py
new file mode 100644
index 000000000000..c6179d548ca0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateEnvironment_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
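+#   For example, "parent_value" in the request below typically stands for the
+#   parent lake, e.g. "projects/{project}/locations/{location}/lakes/{lake}"
+#   (an assumed pattern; environments are created under a lake).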
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.CreateEnvironmentRequest(
+        parent="parent_value",
+        environment_id="environment_id_value",
+        environment=environment,
+    )
+
+    # Make the request
+    operation = client.create_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py
new file mode 100644
index 000000000000..6e83bdaa4850
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateEnvironment_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.CreateEnvironmentRequest(
+        parent="parent_value",
+        environment_id="environment_id_value",
+        environment=environment,
+    )
+
+    # Make the request
+    operation = client.create_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateEnvironment_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py
new file mode 100644
index 000000000000..9cfc496cdf81
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateLake_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
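+#   For example, "parent_value" in the request below typically stands for a
+#   location name such as "projects/{project}/locations/{location}", and
+#   "lake_id_value" for the ID the new lake should take (assumed patterns).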
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateLakeRequest(
+        parent="parent_value",
+        lake_id="lake_id_value",
+    )
+
+    # Make the request
+    operation = client.create_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py
new file mode 100644
index 000000000000..424884d08a39
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateLake_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.CreateLakeRequest(
+        parent="parent_value",
+        lake_id="lake_id_value",
+    )
+
+    # Make the request
+    operation = client.create_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateLake_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py
new file mode 100644
index 000000000000..570446c72951
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateTask_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
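+#   For example, "parent_value" in the request below typically stands for the
+#   parent lake, e.g. "projects/{project}/locations/{location}/lakes/{lake}"
+#   (an assumed pattern; tasks are created under a lake).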
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    task = dataplex_v1.Task()
+    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
+    task.trigger_spec.schedule = "schedule_value"
+    task.trigger_spec.type_ = "RECURRING"
+    task.execution_spec.service_account = "service_account_value"
+
+    request = dataplex_v1.CreateTaskRequest(
+        parent="parent_value",
+        task_id="task_id_value",
+        task=task,
+    )
+
+    # Make the request
+    operation = client.create_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py
new file mode 100644
index 000000000000..059d6e9caf59
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateTask_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+def sample_create_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    task = dataplex_v1.Task()
+    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
+    task.trigger_spec.schedule = "schedule_value"
+    task.trigger_spec.type_ = "RECURRING"
+    task.execution_spec.service_account = "service_account_value"
+
+    request = dataplex_v1.CreateTaskRequest(
+        parent="parent_value",
+        task_id="task_id_value",
+        task=task,
+    )
+
+    # Make the request
+    operation = client.create_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py
new file mode 100644
index 000000000000..6019acc85718
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateZone_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
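+#   For example, "parent_value" in the request below typically stands for the
+#   parent lake, e.g. "projects/{project}/locations/{location}/lakes/{lake}"
+#   (an assumed pattern; zones are created under a lake).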
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_create_zone():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    zone = dataplex_v1.Zone()
+    zone.type_ = "CURATED"
+    zone.resource_spec.location_type = "MULTI_REGION"
+
+    request = dataplex_v1.CreateZoneRequest(
+        parent="parent_value",
+        zone_id="zone_id_value",
+        zone=zone,
+    )
+
+    # Make the request
+    operation = client.create_zone(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_CreateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
new file mode 100644
index 000000000000..6a4caa9ec79d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_CreateZone_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.CreateZoneRequest( + parent="parent_value", + zone_id="zone_id_value", + zone=zone, + ) + + # Make the request + operation = client.create_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_CreateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py new file mode 100644 index 000000000000..cb38f16150f4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
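+#   (for DeleteAsset, "name_value" would typically be a full asset resource
+#   name of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}";
+#   the braced segments are placeholders, not literal values)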
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py new file mode 100644 index 000000000000..df1f8c9727f0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteAsset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteAssetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py new file mode 100644 index 000000000000..e4ddd35aff61 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
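+#   (for DeleteEnvironment, "name_value" would typically be of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}";
+#   the braced segments are placeholders)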
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py new file mode 100644 index 000000000000..bef7a8b3bbc9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py new file mode 100644 index 000000000000..b471608aca67 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
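+#   (for DeleteLake, "name_value" would typically be of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}";
+#   the braced segments are placeholders)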
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py new file mode 100644 index 000000000000..e0712b0d17b8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteLake_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteLakeRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py new file mode 100644 index 000000000000..770fcc16f1f4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
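+#   (for DeleteTask, "name_value" would typically be of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}";
+#   the braced segments are placeholders)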
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py new file mode 100644 index 000000000000..f467f23478b7 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py new file mode 100644 index 000000000000..c312b04269ba --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
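+#   (for DeleteZone, "name_value" would typically be of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}";
+#   the braced segments are placeholders)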
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py new file mode 100644 index 000000000000..c13e5cd84601 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_DeleteZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteZoneRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_DeleteZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py new file mode 100644 index 000000000000..93fe6155f85b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
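+#   (GetAsset takes the same full asset resource name shown for DeleteAsset
+#   above; unlike the create/delete samples, Get* calls return the resource
+#   directly, so there is no long-running operation to wait on)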
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_asset(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py new file mode 100644 index 000000000000..92f815fcbc39 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetAsset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetAssetRequest( + name="name_value", + ) + + # Make the request + response = client.get_asset(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py new file mode 100644 index 000000000000..c54409b11c2c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py new file mode 100644 index 000000000000..94f419065b91 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py new file mode 100644 index 000000000000..cd1d8e123e95 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
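+#   (for GetJob, "name_value" would typically be of the form
+#   "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}";
+#   the braced segments are placeholders)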
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py new file mode 100644 index 000000000000..8b332b6b6de9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_job(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py new file mode 100644 index 000000000000..b42d72569d47 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lake(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py new file mode 100644 index 000000000000..610dd6b9dabc --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetLake_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetLakeRequest( + name="name_value", + ) + + # Make the request + response = client.get_lake(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py new file mode 100644 index 000000000000..e032ebe67bc5 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py new file mode 100644 index 000000000000..147f2f6893d0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py new file mode 100644 index 000000000000..99dcf4f47a02 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = await client.get_zone(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py new file mode 100644 index 000000000000..9f91127efc5f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_GetZone_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetZoneRequest( + name="name_value", + ) + + # Make the request + response = client.get_zone(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_GetZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py new file mode 100644 index 000000000000..8845154ecd43 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssetActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssetActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
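+#   (for ListAssetActions, "parent_value" is the asset whose actions are
+#   listed, using the full asset resource name format shown earlier; the
+#   returned pager fetches additional pages transparently as the loop
+#   below consumes it)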
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssetActions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py new file mode 100644 index 000000000000..417850a8ef86 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssetActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssetActions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_asset_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_asset_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssetActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py new file mode 100644 index 000000000000..78a17cdc6cd6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssets_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py new file mode 100644 index 000000000000..b12afd0684d9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_assets(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListAssets_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py new file mode 100644 index 000000000000..a816672d20ba --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
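[editor's sketch] The object returned by the list methods is a pager from the generated pagers module, and besides per-item iteration it exposes whole pages. A sketch against the ListAssets sample above (the assets field name follows the v1 response message):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    pager = client.list_assets(
        request=dataplex_v1.ListAssetsRequest(parent="parent_value"),
    )

    # Walk page by page; the pager fetches next_page_token behind the scenes.
    for page in pager.pages:
        for asset in page.assets:
            print(asset.name)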
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListEnvironments_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py new file mode 100644 index 000000000000..a87c26037b1f --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListEnvironments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListEnvironments_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py new file mode 100644 index 000000000000..519c8b556f56 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
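[editor's sketch] Building a request object is not mandatory: the generated methods also flatten the common request fields into keyword arguments. A sketch with a hypothetical parent name:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()

    # Equivalent to passing ListEnvironmentsRequest(parent=...).
    for environment in client.list_environments(
        parent="projects/my-project/locations/us-central1/lakes/my-lake",  # hypothetical
    ):
        print(environment.name)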
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListJobs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py new file mode 100644 index 000000000000..231b588f3d08 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_jobs(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py new file mode 100644 index 000000000000..b3b3b90becee --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakeActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakeActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_lake_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakeActions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py new file mode 100644 index 000000000000..4eb7cfeacfb1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakeActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakeActions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_lake_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakeActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lake_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakeActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py new file mode 100644 index 000000000000..1821ff51eb42 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_lakes(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakes_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py new file mode 100644 index 000000000000..30bd707e596c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLakes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListLakes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_lakes(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListLakesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lakes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListLakes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py new file mode 100644 index 000000000000..87fde35485ec --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
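[editor's sketch] The "parent_value" placeholder in the ListLakes samples stands for a projects/{project}/locations/{location} name, which the generated client can assemble for you, and the list requests carry the usual paging and filtering fields. A sketch; the filter expression below is an assumption, so check the ListLakes reference for the supported syntax:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    parent = dataplex_v1.DataplexServiceClient.common_location_path(
        "my-project", "us-central1"  # hypothetical project and location
    )

    request = dataplex_v1.ListLakesRequest(
        parent=parent,
        page_size=50,                # upper bound per page; the server may return fewer
        filter='labels.env="prod"',  # hypothetical filter expression
    )
    for lake in client.list_lakes(request=request):
        print(lake.name)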
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListSessions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py new file mode 100644 index 000000000000..5e2349662e53 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListSessions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListSessions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py new file mode 100644 index 000000000000..ee1bb7a358f2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListTasks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListTasks_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py new file mode 100644 index 000000000000..8d6ea9e99d91 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListTasks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_tasks(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListTasks_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py new file mode 100644 index 000000000000..11d1477126da --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZoneActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZoneActions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_zone_actions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZoneActions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py new file mode 100644 index 000000000000..f254856503de --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZoneActions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZoneActions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_zone_actions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZoneActionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zone_actions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZoneActions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py new file mode 100644 index 000000000000..8a3a26b0f9c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZones +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZones_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_zones(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZones_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py new file mode 100644 index 000000000000..314a861f927e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListZones +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_ListZones_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_zones(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListZonesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_zones(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_DataplexService_ListZones_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py new file mode 100644 index 000000000000..ea91643d7ee1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_RunTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_RunTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py new file mode 100644 index 000000000000..cca14742d157 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_RunTask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_run_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_RunTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py new file mode 100644 index 000000000000..1825dd3af6ae --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateAsset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
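[editor's sketch] The samples let API errors propagate. Real callers usually catch the google.api_core exception types that the generated clients raise. A sketch around RunTask, with a hypothetical task name:

    from google.api_core import exceptions
    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    name = "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task"  # hypothetical

    try:
        response = client.run_task(request=dataplex_v1.RunTaskRequest(name=name))
        print(response)
    except exceptions.NotFound:
        print(f"No such task: {name}")
    except exceptions.GoogleAPICallError as exc:
        print(f"RunTask failed: {exc}")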
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = await client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py new file mode 100644 index 000000000000..57a672651f46 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAsset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateAsset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_asset(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + asset = dataplex_v1.Asset() + asset.resource_spec.type_ = "BIGQUERY_DATASET" + + request = dataplex_v1.UpdateAssetRequest( + asset=asset, + ) + + # Make the request + operation = client.update_asset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py new file mode 100644 index 000000000000..07d1d4985472 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
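[editor's sketch] The UpdateAsset samples populate only the fields snippetgen treats as required; in practice an update usually also names the fields being changed through a google.protobuf FieldMask on the request. A sketch, with a hypothetical resource name and field path:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    asset = dataplex_v1.Asset()
    asset.name = "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/assets/my-asset"  # hypothetical
    asset.description = "Refreshed description"

    request = dataplex_v1.UpdateAssetRequest(
        asset=asset,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )

    operation = dataplex_v1.DataplexServiceClient().update_asset(request=request)
    response = operation.result(timeout=300)  # bound the wait instead of blocking indefinitely
    print(response)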
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = await client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py new file mode 100644 index 000000000000..b77bfdcee105 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py new file mode 100644 index 000000000000..3d049c83d769 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLake +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateLake_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
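+# - The UpdateLakeRequest below is constructed empty only because Lake has no
+#   client-required fields; a real update would set the lake being modified
+#   and, typically, an update_mask. A hedged sketch (values illustrative):
+#       from google.protobuf import field_mask_pb2
+#       lake = dataplex_v1.Lake(description="new description")
+#       request = dataplex_v1.UpdateLakeRequest(
+#           lake=lake,
+#           update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+#       )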
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_lake():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.UpdateLakeRequest(
+    )
+
+    # Make the request
+    operation = await client.update_lake(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
new file mode 100644
index 000000000000..334145584703
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateLake
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateLake_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_lake(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateLakeRequest( + ) + + # Make the request + operation = client.update_lake(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py new file mode 100644 index 000000000000..1f02d2952d62 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateTask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
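+# - Task configuration is a oneof: assigning task.spark below selects a Spark
+#   task and would clear any notebook config. The RECURRING trigger requires
+#   the cron-style schedule the template sets; an on-demand variant would
+#   presumably drop it:
+#       task.trigger_spec.type_ = "ON_DEMAND"  # no schedule for on-demand runs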
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_task():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    task = dataplex_v1.Task()
+    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
+    task.trigger_spec.schedule = "schedule_value"
+    task.trigger_spec.type_ = "RECURRING"
+    task.execution_spec.service_account = "service_account_value"
+
+    request = dataplex_v1.UpdateTaskRequest(
+        task=task,
+    )
+
+    # Make the request
+    operation = await client.update_task(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
new file mode 100644
index 000000000000..a7afc9397dc0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateTask
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateTask_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_task(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + task = dataplex_v1.Task() + task.spark.main_jar_file_uri = "main_jar_file_uri_value" + task.trigger_spec.schedule = "schedule_value" + task.trigger_spec.type_ = "RECURRING" + task.execution_spec.service_account = "service_account_value" + + request = dataplex_v1.UpdateTaskRequest( + task=task, + ) + + # Make the request + operation = client.update_task(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py new file mode 100644 index 000000000000..80f26b89522d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateZone +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_DataplexService_UpdateZone_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
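+# - zone.type_ and zone.resource_spec.location_type are immutable after the
+#   zone is created, so in an update they are set only to re-state the zone's
+#   existing values (RAW or CURATED, SINGLE_REGION or MULTI_REGION), e.g.:
+#       zone.type_ = "RAW"  # must match the zone as it already exists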
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataplex_v1
+
+
+async def sample_update_zone():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    zone = dataplex_v1.Zone()
+    zone.type_ = "CURATED"
+    zone.resource_spec.location_type = "MULTI_REGION"
+
+    request = dataplex_v1.UpdateZoneRequest(
+        zone=zone,
+    )
+
+    # Make the request
+    operation = await client.update_zone(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_v1_generated_DataplexService_UpdateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
new file mode 100644
index 000000000000..ec2683f4dc9f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateZone
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_v1_generated_DataplexService_UpdateZone_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_zone(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + zone = dataplex_v1.Zone() + zone.type_ = "CURATED" + zone.resource_spec.location_type = "MULTI_REGION" + + request = dataplex_v1.UpdateZoneRequest( + zone=zone, + ) + + # Make the request + operation = client.update_zone(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_DataplexService_UpdateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py new file mode 100644 index 000000000000..0464145a93b4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreateEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
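+# - The fields populated below are the client-required ones: type_ is TABLE
+#   or FILESET, system names the storage backend, and asset/data_path bind
+#   the entity to existing storage. A hedged Cloud Storage variant (values
+#   illustrative):
+#       entity.type_ = "FILESET"
+#       entity.system = "CLOUD_STORAGE"
+#       entity.data_path = "gs://my-bucket/my-prefix"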
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = await client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py new file mode 100644 index 000000000000..fc9b2a1e1368 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreateEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py new file mode 100644 index 000000000000..9ac3bd423a53 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreatePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
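+# - partition.values appears to be positional: one value per partition key in
+#   the parent entity's schema, in the same order, with location pointing at
+#   the matching data, e.g. (values illustrative):
+#       partition.values = ["2025", "01"]  # e.g. year and month keys
+#       partition.location = "gs://my-bucket/data/year=2025/month=01"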
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = await client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreatePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py new file mode 100644 index 000000000000..68759f52656c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_CreatePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value1', 'values_value2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_CreatePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py new file mode 100644 index 000000000000..0c1ddc783b69 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeleteEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
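+# - The etag is required and must match the server's current value, normally
+#   read back just before deleting:
+#       entity = await client.get_entity(
+#           request=dataplex_v1.GetEntityRequest(name="name_value"))
+#       request = dataplex_v1.DeleteEntityRequest(
+#           name="name_value", etag=entity.etag)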
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + await client.delete_entity(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeleteEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py new file mode 100644 index 000000000000..eda8649ce8c9 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeleteEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeleteEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py new file mode 100644 index 000000000000..3b92c18589ef --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeletePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
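+# - name is the full partition resource name; its trailing segment is the
+#   partition's ordered key values joined by "/", e.g. (illustrative):
+#       name = ".../zones/z/entities/e/partitions/value1/value2"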
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_partition(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeletePartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py new file mode 100644 index 000000000000..0caa639adb6c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_DeletePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_partition(request=request) + + +# [END dataplex_v1_generated_MetadataService_DeletePartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py new file mode 100644 index 000000000000..8c6bfd66e0c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
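+# - By default only basic entity metadata is returned; the optional view
+#   argument widens the payload, e.g.:
+#       request = dataplex_v1.GetEntityRequest(name="name_value", view="FULL")
+#   ("SCHEMA" adds column information; "FULL" returns everything.)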
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py new file mode 100644 index 000000000000..91400bfc9e7d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntityRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py new file mode 100644 index 000000000000..bdd927959bcb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetPartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetPartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py new file mode 100644 index 000000000000..b5fb1a52ecf6 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_GetPartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetPartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_GetPartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py new file mode 100644 index 000000000000..39c288b6c43e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
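+# - The pager returned below fetches further pages transparently during
+#   iteration; page size and a server-side filter can also be set, e.g.
+#   (filter string illustrative):
+#       request = dataplex_v1.ListEntitiesRequest(
+#           parent="parent_value", view="FILESETS",
+#           page_size=50, filter='id="my_entity"')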
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListEntities_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py new file mode 100644 index 000000000000..f03b686a369a --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_entities(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEntitiesRequest( + parent="parent_value", + view="FILESETS", + ) + + # Make the request + page_result = client.list_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListEntities_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py new file mode 100644 index 000000000000..cbd82d4433a4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListPartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListPartitions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py new file mode 100644 index 000000000000..2854a45a94a1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_ListPartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_partitions(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListPartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partitions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_MetadataService_ListPartitions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py new file mode 100644 index 000000000000..6f6d1525a4c4 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_UpdateEntity_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
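+# - UpdateEntity takes no update_mask, so the entity sent below is treated as
+#   the full new state of the metadata; validate_only offers a dry run first:
+#       request = dataplex_v1.UpdateEntityRequest(entity=entity, validate_only=True)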
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = await client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_UpdateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py new file mode 100644 index 000000000000..fa71e34d815b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_MetadataService_UpdateEntity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_MetadataService_UpdateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json new file mode 100644 index 000000000000..a12a3f36de1e --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -0,0 +1,20224 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dataplex.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dataplex", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": 
"dataplex_v1_generated_business_glossary_service_create_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + 
"description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + "description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": 
"dataplex_v1_generated_business_glossary_service_delete_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for 
GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager", + "shortName": "list_glossary_categories" + }, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager", + "shortName": "list_glossary_categories" + }, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_terms", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { 
+ "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + 
"fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "segments": [ + { + "end": 61, + "start": 
27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" + }, + { + "name": "entry_link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" + }, + "description": "Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" + }, + { + "name": "entry_link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" + }, + "description": 
"Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "entry_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_type" + }, + "description": "Sample for CreateEntryType", + "file": "dataplex_v1_generated_catalog_service_create_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "entry_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_entry_type" + }, + "description": "Sample for CreateEntryType", + "file": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "dataplex_v1_generated_catalog_service_create_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "dataplex_v1_generated_catalog_service_create_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" + }, + "description": "Sample for DeleteEntryLink", + "file": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" + }, + "description": "Sample for DeleteEntryLink", + "file": 
"dataplex_v1_generated_catalog_service_delete_entry_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_entry_type" + }, + "description": "Sample for DeleteEntryType", + "file": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_entry_type" + }, + "description": "Sample for DeleteEntryType", + "file": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_CatalogService_DeleteEntryType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "dataplex_v1_generated_catalog_service_delete_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "dataplex_v1_generated_catalog_service_delete_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "get_entry_link" + }, + "description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "get_entry_link" + }, + "description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" + }, + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" + }, + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "service": { + "fullName": 
"google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" + }, + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" + }, + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_aspect_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListAspectTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager", + "shortName": "list_aspect_types" + }, + "description": "Sample for ListAspectTypes", + "file": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListAspectTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager", + "shortName": "list_aspect_types" + }, + "description": "Sample for ListAspectTypes", + "file": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "dataplex_v1_generated_catalog_service_list_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "dataplex_v1_generated_catalog_service_list_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_groups", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager", + "shortName": "list_entry_groups" + }, + "description": 
"Sample for ListEntryGroups", + "file": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager", + "shortName": "list_entry_types" + }, + "description": "Sample for ListEntryTypes", + "file": 
"dataplex_v1_generated_catalog_service_list_entry_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListEntryTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager", + "shortName": "list_entry_types" + }, + "description": "Sample for ListEntryTypes", + "file": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.lookup_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "dataplex_v1_generated_catalog_service_lookup_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_lookup_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.search_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "SearchEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager", + "shortName": "search_entries" + }, + "description": "Sample for SearchEntries", + "file": "dataplex_v1_generated_catalog_service_search_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_search_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.search_entries", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "SearchEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager", + "shortName": "search_entries" + }, + "description": "Sample for SearchEntries", + "file": "dataplex_v1_generated_catalog_service_search_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_search_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_aspect_type" + }, + "description": "Sample for UpdateAspectType", + "file": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + 
}, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_aspect_type" + }, + "description": "Sample for UpdateAspectType", + "file": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_update_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_entry_type" + }, + "description": "Sample for UpdateEntryType", + "file": "dataplex_v1_generated_catalog_service_update_entry_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_CatalogService_UpdateEntryType_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntryType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" + }, + { + "name": "entry_type", + "type": "google.cloud.dataplex_v1.types.EntryType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_entry_type" + }, + "description": "Sample for UpdateEntryType", + "file": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "dataplex_v1_generated_catalog_service_update_entry_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.dataplex_v1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "dataplex_v1_generated_catalog_service_update_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_update_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", + "shortName": "CmekServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.create_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.CreateEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "CreateEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "encryption_config", + "type": "google.cloud.dataplex_v1.types.EncryptionConfig" + }, + { + "name": "encryption_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_encryption_config" + }, + "description": "Sample for 
CreateEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_create_encryption_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_CreateEncryptionConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_create_encryption_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceClient", + "shortName": "CmekServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceClient.create_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.CreateEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "CreateEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "encryption_config", + "type": "google.cloud.dataplex_v1.types.EncryptionConfig" + }, + { + "name": "encryption_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_encryption_config" + }, + "description": "Sample for CreateEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_create_encryption_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_create_encryption_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", + "shortName": "CmekServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.delete_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.DeleteEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "DeleteEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_encryption_config" + }, + "description": "Sample for DeleteEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_delete_encryption_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_delete_encryption_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceClient", + "shortName": "CmekServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceClient.delete_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.DeleteEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "DeleteEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_encryption_config" + }, + "description": "Sample for DeleteEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", + "shortName": "CmekServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.get_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.GetEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "GetEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEncryptionConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EncryptionConfig", + "shortName": "get_encryption_config" + }, 
+ "description": "Sample for GetEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_get_encryption_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_GetEncryptionConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_get_encryption_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceClient", + "shortName": "CmekServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceClient.get_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.GetEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "GetEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEncryptionConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EncryptionConfig", + "shortName": "get_encryption_config" + }, + "description": "Sample for GetEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_get_encryption_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_GetEncryptionConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_get_encryption_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", + "shortName": "CmekServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.list_encryption_configs", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.ListEncryptionConfigs", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "ListEncryptionConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsAsyncPager", + "shortName": "list_encryption_configs" + }, + "description": "Sample for ListEncryptionConfigs", + "file": 
"dataplex_v1_generated_cmek_service_list_encryption_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_ListEncryptionConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_list_encryption_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceClient", + "shortName": "CmekServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceClient.list_encryption_configs", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.ListEncryptionConfigs", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "ListEncryptionConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsPager", + "shortName": "list_encryption_configs" + }, + "description": "Sample for ListEncryptionConfigs", + "file": "dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", + "shortName": "CmekServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.update_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.UpdateEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "UpdateEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest" + }, + { + "name": "encryption_config", + "type": "google.cloud.dataplex_v1.types.EncryptionConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"update_encryption_config" + }, + "description": "Sample for UpdateEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_update_encryption_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_update_encryption_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CmekServiceClient", + "shortName": "CmekServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CmekServiceClient.update_encryption_config", + "method": { + "fullName": "google.cloud.dataplex.v1.CmekService.UpdateEncryptionConfig", + "service": { + "fullName": "google.cloud.dataplex.v1.CmekService", + "shortName": "CmekService" + }, + "shortName": "UpdateEncryptionConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest" + }, + { + "name": "encryption_config", + "type": "google.cloud.dataplex_v1.types.EncryptionConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_encryption_config" + }, + "description": "Sample for UpdateEncryptionConfig", + "file": "dataplex_v1_generated_cmek_service_update_encryption_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_cmek_service_update_encryption_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.create_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "CreateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "create_content" + }, + "description": "Sample for CreateContent", + "file": "dataplex_v1_generated_content_service_create_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_CreateContent_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_create_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.create_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "CreateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "create_content" + }, + "description": "Sample for CreateContent", + "file": "dataplex_v1_generated_content_service_create_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_CreateContent_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_create_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.delete_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "DeleteContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_content" + }, + "description": "Sample for 
DeleteContent", + "file": "dataplex_v1_generated_content_service_delete_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_delete_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.delete_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "DeleteContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_content" + }, + "description": "Sample for DeleteContent", + "file": "dataplex_v1_generated_content_service_delete_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_delete_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "get_content" + }, + "description": "Sample for GetContent", + "file": "dataplex_v1_generated_content_service_get_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "get_content" + }, + "description": "Sample for GetContent", + "file": "dataplex_v1_generated_content_service_get_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "dataplex_v1_generated_content_service_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 
50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "dataplex_v1_generated_content_service_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.list_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "ListContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager", + "shortName": "list_content" + }, + "description": "Sample for ListContent", + "file": "dataplex_v1_generated_content_service_list_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_ListContent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_list_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.list_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "ListContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListContentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager", + "shortName": "list_content" + }, + "description": "Sample for ListContent", + "file": "dataplex_v1_generated_content_service_list_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_ListContent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_list_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.SetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "dataplex_v1_generated_content_service_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", + "method": { + "fullName": 
"google.cloud.dataplex.v1.ContentService.SetIamPolicy", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "dataplex_v1_generated_content_service_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "dataplex_v1_generated_content_service_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { 
+ "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", + "shortName": "ContentServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.update_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "UpdateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "update_content" + }, + "description": "Sample for UpdateContent", + "file": "dataplex_v1_generated_content_service_update_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_update_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.ContentServiceClient", + "shortName": "ContentServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.ContentServiceClient.update_content", + "method": { + "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", + "service": { + "fullName": "google.cloud.dataplex.v1.ContentService", + "shortName": "ContentService" + }, + "shortName": "UpdateContent" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.UpdateContentRequest" + }, + { + "name": "content", + "type": "google.cloud.dataplex_v1.types.Content" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Content", + "shortName": "update_content" + }, + "description": "Sample for UpdateContent", + "file": "dataplex_v1_generated_content_service_update_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_content_service_update_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.create_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "CreateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "data_scan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_scan" + }, + "description": "Sample for CreateDataScan", + "file": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": 
"DataScanService" + }, + "shortName": "CreateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "data_scan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_scan" + }, + "description": "Sample for CreateDataScan", + "file": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.delete_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "DeleteDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_scan" + }, + "description": "Sample for DeleteDataScan", + "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": 
"DataScanService" + }, + "shortName": "DeleteDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_scan" + }, + "description": "Sample for DeleteDataScan", + "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.generate_data_quality_rules", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GenerateDataQualityRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", + "shortName": "generate_data_quality_rules" + }, + "description": "Sample for GenerateDataQualityRules", + "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" 
+ }, + "shortName": "GenerateDataQualityRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", + "shortName": "generate_data_quality_rules" + }, + "description": "Sample for GenerateDataQualityRules", + "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScanJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScanJob", + "shortName": "get_data_scan_job" + }, + "description": "Sample for GetDataScanJob", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScanJob" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScanJob", + "shortName": "get_data_scan_job" + }, + "description": "Sample for GetDataScanJob", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScan", + "shortName": "get_data_scan" + }, + "description": "Sample for GetDataScan", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "GetDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataScan", + "shortName": "get_data_scan" + }, + "description": "Sample for GetDataScan", + "file": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scan_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScanJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager", + "shortName": "list_data_scan_jobs" + }, + "description": "Sample for ListDataScanJobs", + "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScanJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager", + "shortName": "list_data_scan_jobs" + }, + "description": "Sample for ListDataScanJobs", + "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scans", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager", + "shortName": "list_data_scans" + }, + "description": "Sample for ListDataScans", + "file": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "ListDataScans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager", + "shortName": "list_data_scans" + }, + "description": "Sample for ListDataScans", + "file": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.run_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "RunDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", + "shortName": "run_data_scan" + }, + "description": "Sample for RunDataScan", + "file": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "RunDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", + "shortName": "run_data_scan" + }, + "description": 
"Sample for RunDataScan", + "file": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", + "shortName": "DataScanServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.update_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "UpdateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_scan" + }, + "description": "Sample for UpdateDataScan", + "file": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", + "shortName": "DataScanServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", + "method": { + "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", + "service": { + "fullName": "google.cloud.dataplex.v1.DataScanService", + "shortName": "DataScanService" + }, + "shortName": "UpdateDataScan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" + }, + { + "name": "data_scan", + "type": "google.cloud.dataplex_v1.types.DataScan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "update_data_scan" + }, + "description": "Sample for UpdateDataScan", + "file": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "data_attribute_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_attribute_binding" + }, + "description": "Sample for CreateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "data_attribute_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_attribute_binding" + }, + "description": "Sample for CreateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "data_attribute_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_attribute" + }, + "description": "Sample for CreateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute", + "method": 
{ + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "data_attribute_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_attribute" + }, + "description": "Sample for CreateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "data_taxonomy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_taxonomy" + }, + "description": "Sample for CreateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "CreateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "data_taxonomy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_taxonomy" + }, + "description": "Sample for CreateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_attribute_binding" + }, + "description": "Sample for DeleteDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_attribute_binding" + }, + "description": "Sample for DeleteDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_attribute" + }, + "description": "Sample for DeleteDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_attribute" + }, + "description": "Sample for DeleteDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_taxonomy" + }, + "description": "Sample for DeleteDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "DeleteDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_taxonomy" + }, + "description": "Sample for DeleteDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", + "shortName": "get_data_attribute_binding" + }, + "description": "Sample for GetDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", + "shortName": "get_data_attribute_binding" + }, + "description": "Sample for GetDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttribute", + "shortName": "get_data_attribute" + }, + "description": "Sample for GetDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataAttribute", + "shortName": "get_data_attribute" + }, + "description": "Sample for GetDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", + "shortName": "get_data_taxonomy" + }, + "description": "Sample for GetDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async", + "segments": [ + { + "end": 51, + "start": 
27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "GetDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", + "shortName": "get_data_taxonomy" + }, + "description": "Sample for GetDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attribute_bindings", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributeBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager", + "shortName": "list_data_attribute_bindings" + }, + "description": "Sample for ListDataAttributeBindings", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributeBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager", + "shortName": "list_data_attribute_bindings" + }, + "description": "Sample for ListDataAttributeBindings", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attributes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager", + "shortName": "list_data_attributes" + }, + "description": 
"Sample for ListDataAttributes", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager", + "shortName": "list_data_attributes" + }, + "description": "Sample for ListDataAttributes", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_taxonomies", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager", + "shortName": "list_data_taxonomies" + }, + "description": "Sample for ListDataTaxonomies", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "ListDataTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager", + "shortName": "list_data_taxonomies" + }, + "description": "Sample for ListDataTaxonomies", + "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "update_mask", 
+ "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_attribute_binding" + }, + "description": "Sample for UpdateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttributeBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" + }, + { + "name": "data_attribute_binding", + "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_attribute_binding" + }, + "description": "Sample for UpdateDataAttributeBinding", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute", + "method": { + "fullName": 
"google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_attribute" + }, + "description": "Sample for UpdateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataAttribute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" + }, + { + "name": "data_attribute", + "type": "google.cloud.dataplex_v1.types.DataAttribute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_attribute" + }, + "description": "Sample for UpdateDataAttribute", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", + "shortName": "DataTaxonomyServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_taxonomy" + }, + "description": "Sample for UpdateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", + "shortName": "DataTaxonomyServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", + "method": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", + "service": { + "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", + "shortName": "DataTaxonomyService" + }, + "shortName": "UpdateDataTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" + }, + { + "name": "data_taxonomy", + "type": "google.cloud.dataplex_v1.types.DataTaxonomy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_taxonomy" + }, + "description": "Sample for UpdateDataTaxonomy", + "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "dataplex_v1_generated_dataplex_service_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_cancel_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.cancel_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": 
"DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "asset_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_asset" + }, + "description": "Sample for CreateAsset", + "file": "dataplex_v1_generated_dataplex_service_create_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "asset_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_asset" + }, + "description": "Sample for CreateAsset", + "file": "dataplex_v1_generated_dataplex_service_create_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "environment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "environment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "dataplex_v1_generated_dataplex_service_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "lake_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_lake" + }, + "description": "Sample for CreateLake", + "file": "dataplex_v1_generated_dataplex_service_create_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "lake_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_lake" + }, + "description": "Sample for CreateLake", + "file": "dataplex_v1_generated_dataplex_service_create_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "task_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_task" + }, + "description": "Sample for CreateTask", + "file": "dataplex_v1_generated_dataplex_service_create_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "task_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_task" + }, + "description": "Sample for CreateTask", + "file": "dataplex_v1_generated_dataplex_service_create_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 
61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "dataplex_v1_generated_dataplex_service_create_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "zone_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_zone" + }, + "description": "Sample for CreateZone", + "file": "dataplex_v1_generated_dataplex_service_create_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, 
+ "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_create_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_asset" + }, + "description": "Sample for DeleteAsset", + "file": "dataplex_v1_generated_dataplex_service_delete_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_asset" + }, + "description": "Sample for DeleteAsset", + "file": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dataplex_v1_generated_dataplex_service_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": 
"DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_lake" + }, + "description": "Sample for DeleteLake", + "file": "dataplex_v1_generated_dataplex_service_delete_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_lake" + }, + "description": "Sample for DeleteLake", + "file": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", + "service": { + "fullName": 
"google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_task" + }, + "description": "Sample for DeleteTask", + "file": "dataplex_v1_generated_dataplex_service_delete_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_task" + }, + "description": "Sample for DeleteTask", + "file": "dataplex_v1_generated_dataplex_service_delete_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "dataplex_v1_generated_dataplex_service_delete_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_zone" + }, + "description": "Sample for DeleteZone", + "file": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.cloud.dataplex_v1.types.Asset", + "shortName": "get_asset" + }, + "description": "Sample for GetAsset", + "file": "dataplex_v1_generated_dataplex_service_get_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetAssetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Asset", + "shortName": "get_asset" + }, + "description": "Sample for GetAsset", + "file": "dataplex_v1_generated_dataplex_service_get_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dataplex_v1_generated_dataplex_service_get_environment_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dataplex_v1_generated_dataplex_service_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "dataplex_v1_generated_dataplex_service_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + 
{ + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_job", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "dataplex_v1_generated_dataplex_service_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Lake", + "shortName": "get_lake" + }, + "description": "Sample for GetLake", + "file": "dataplex_v1_generated_dataplex_service_get_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetLake_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_get_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetLakeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Lake", + "shortName": "get_lake" + }, + "description": "Sample for GetLake", + "file": "dataplex_v1_generated_dataplex_service_get_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetLake_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_lake_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Task", + "shortName": "get_task" + }, + "description": "Sample for GetTask", + "file": "dataplex_v1_generated_dataplex_service_get_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceClient.get_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Task", + "shortName": "get_task" + }, + "description": "Sample for GetTask", + "file": "dataplex_v1_generated_dataplex_service_get_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "dataplex_v1_generated_dataplex_service_get_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetZone_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "GetZone" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetZoneRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Zone", + "shortName": "get_zone" + }, + "description": "Sample for GetZone", + "file": "dataplex_v1_generated_dataplex_service_get_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_GetZone_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_get_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_asset_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssetActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager", + "shortName": "list_asset_actions" + }, + "description": "Sample for ListAssetActions", + "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssetActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" + }, + { + "name": "parent", + "type": "str" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager", + "shortName": "list_asset_actions" + }, + "description": "Sample for ListAssetActions", + "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_assets", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager", + "shortName": "list_assets" + }, + "description": "Sample for ListAssets", + "file": "dataplex_v1_generated_dataplex_service_list_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_assets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_assets", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager", + "shortName": "list_assets" + }, + "description": "Sample for ListAssets", + "file": "dataplex_v1_generated_dataplex_service_list_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_assets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dataplex_v1_generated_dataplex_service_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_environments", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dataplex_v1_generated_dataplex_service_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "dataplex_v1_generated_dataplex_service_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager", + "shortName": "list_jobs" + }, + 
"description": "Sample for ListJobs", + "file": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lake_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakeActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager", + "shortName": "list_lake_actions" + }, + "description": "Sample for ListLakeActions", + "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakeActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager", + "shortName": "list_lake_actions" + }, + "description": "Sample for ListLakeActions", + "file": 
"dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lakes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager", + "shortName": "list_lakes" + }, + "description": "Sample for ListLakes", + "file": "dataplex_v1_generated_dataplex_service_list_lakes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lakes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListLakes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListLakesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager", + "shortName": "list_lakes" + }, + "description": "Sample for ListLakes", + "file": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_DataplexService_ListLakes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_sessions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "dataplex_v1_generated_dataplex_service_list_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + 
}, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_tasks", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager", + "shortName": "list_tasks" + }, + "description": "Sample for ListTasks", + "file": "dataplex_v1_generated_dataplex_service_list_tasks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_tasks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListTasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListTasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager", + "shortName": "list_tasks" + }, + "description": "Sample for ListTasks", + "file": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zone_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZoneActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager", + "shortName": "list_zone_actions" + }, + "description": "Sample for ListZoneActions", + "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZoneActions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager", + "shortName": "list_zone_actions" + }, + "description": "Sample for ListZoneActions", + "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 
46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zones", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager", + "shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "dataplex_v1_generated_dataplex_service_list_zones_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZones_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_list_zones_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zones", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "ListZones" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListZonesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager", + "shortName": "list_zones" + }, + "description": "Sample for ListZones", + "file": "dataplex_v1_generated_dataplex_service_list_zones_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_ListZones_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_list_zones_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.run_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "RunTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", + "shortName": "run_task" + }, + "description": "Sample for RunTask", + "file": "dataplex_v1_generated_dataplex_service_run_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_RunTask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_run_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.run_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "RunTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.RunTaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", + "shortName": "run_task" + }, + "description": "Sample for RunTask", + "file": "dataplex_v1_generated_dataplex_service_run_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_RunTask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_run_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_asset" + }, + "description": "Sample for UpdateAsset", + "file": "dataplex_v1_generated_dataplex_service_update_asset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_asset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_asset", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateAsset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" + }, + { + "name": "asset", + "type": "google.cloud.dataplex_v1.types.Asset" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_asset" + }, + "description": "Sample for UpdateAsset", + "file": "dataplex_v1_generated_dataplex_service_update_asset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_asset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": 
"DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_environment", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "environment", + "type": "google.cloud.dataplex_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dataplex_v1_generated_dataplex_service_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_environment_sync.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_lake" + }, + "description": "Sample for UpdateLake", + "file": "dataplex_v1_generated_dataplex_service_update_lake_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_lake_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_lake", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateLake" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" + }, + { + "name": "lake", + "type": "google.cloud.dataplex_v1.types.Lake" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_lake" + }, + "description": "Sample for UpdateLake", + "file": "dataplex_v1_generated_dataplex_service_update_lake_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_lake_sync.py" + 
}, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_task" + }, + "description": "Sample for UpdateTask", + "file": "dataplex_v1_generated_dataplex_service_update_task_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_task_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_task", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateTask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" + }, + { + "name": "task", + "type": "google.cloud.dataplex_v1.types.Task" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_task" + }, + "description": "Sample for UpdateTask", + "file": "dataplex_v1_generated_dataplex_service_update_task_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_dataplex_service_update_task_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", + "shortName": "DataplexServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "dataplex_v1_generated_dataplex_service_update_zone_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_dataplex_service_update_zone_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", + "shortName": "DataplexServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_zone", + "method": { + "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", + "service": { + "fullName": "google.cloud.dataplex.v1.DataplexService", + "shortName": "DataplexService" + }, + "shortName": "UpdateZone" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" + }, + { + "name": "zone", + "type": "google.cloud.dataplex_v1.types.Zone" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_zone" + }, + "description": "Sample for UpdateZone", + "file": "dataplex_v1_generated_dataplex_service_update_zone_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dataplex_v1_generated_dataplex_service_update_zone_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity", + "type": "google.cloud.dataplex_v1.types.Entity" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "create_entity" + }, + "description": "Sample for CreateEntity", + "file": "dataplex_v1_generated_metadata_service_create_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_create_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity", + "type": "google.cloud.dataplex_v1.types.Entity" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "create_entity" + }, + "description": "Sample for CreateEntity", + "file": "dataplex_v1_generated_metadata_service_create_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_metadata_service_create_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreatePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partition", + "type": "google.cloud.dataplex_v1.types.Partition" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "create_partition" + }, + "description": "Sample for CreatePartition", + "file": "dataplex_v1_generated_metadata_service_create_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_create_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "CreatePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partition", + "type": "google.cloud.dataplex_v1.types.Partition" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "create_partition" + }, + "description": "Sample for CreatePartition", + "file": "dataplex_v1_generated_metadata_service_create_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_create_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeleteEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_entity" + }, + "description": "Sample for DeleteEntity", + "file": "dataplex_v1_generated_metadata_service_delete_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeleteEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_entity" + }, + "description": "Sample for DeleteEntity", + "file": "dataplex_v1_generated_metadata_service_delete_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeletePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_partition" + }, + "description": "Sample for DeletePartition", + "file": "dataplex_v1_generated_metadata_service_delete_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "DeletePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_partition" + }, + "description": "Sample for DeletePartition", + "file": "dataplex_v1_generated_metadata_service_delete_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_delete_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetEntity" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "get_entity" + }, + "description": "Sample for GetEntity", + "file": "dataplex_v1_generated_metadata_service_get_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "get_entity" + }, + "description": "Sample for GetEntity", + "file": "dataplex_v1_generated_metadata_service_get_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_entity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetPartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "get_partition" + }, + "description": "Sample for GetPartition", + "file": "dataplex_v1_generated_metadata_service_get_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_partition", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "GetPartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Partition", + "shortName": "get_partition" + }, + "description": "Sample for GetPartition", + "file": "dataplex_v1_generated_metadata_service_get_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_get_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_entities", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager", + "shortName": 
"list_entities" + }, + "description": "Sample for ListEntities", + "file": "dataplex_v1_generated_metadata_service_list_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_entities", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager", + "shortName": "list_entities" + }, + "description": "Sample for ListEntities", + "file": "dataplex_v1_generated_metadata_service_list_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_partitions", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListPartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager", + "shortName": "list_partitions" + }, + "description": "Sample for ListPartitions", + "file": 
"dataplex_v1_generated_metadata_service_list_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "ListPartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager", + "shortName": "list_partitions" + }, + "description": "Sample for ListPartitions", + "file": "dataplex_v1_generated_metadata_service_list_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_list_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.update_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "update_entity" + }, + "description": "Sample for UpdateEntity", + "file": "dataplex_v1_generated_metadata_service_update_entity_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_MetadataService_UpdateEntity_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_update_entity_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.update_entity", + "method": { + "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", + "service": { + "fullName": "google.cloud.dataplex.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Entity", + "shortName": "update_entity" + }, + "description": "Sample for UpdateEntity", + "file": "dataplex_v1_generated_metadata_service_update_entity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_metadata_service_update_entity_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py new file mode 100644 index 000000000000..e054db378114 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py @@ -0,0 +1,298 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataplexCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', ), + 'cancel_metadata_job': ('name', ), + 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), + 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), + 'create_content': ('parent', 'content', 'validate_only', ), + 'create_data_attribute': ('parent', 'data_attribute_id', 'data_attribute', 'validate_only', ), + 'create_data_attribute_binding': ('parent', 'data_attribute_binding_id', 'data_attribute_binding', 'validate_only', ), + 'create_data_scan': ('parent', 'data_scan', 'data_scan_id', 'validate_only', ), + 'create_data_taxonomy': ('parent', 'data_taxonomy_id', 'data_taxonomy', 'validate_only', ), + 'create_encryption_config': ('parent', 'encryption_config_id', 'encryption_config', ), + 'create_entity': ('parent', 'entity', 'validate_only', ), + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), + 'create_entry_link': ('parent', 'entry_link_id', 'entry_link', ), + 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), + 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), + 'create_glossary': ('parent', 'glossary_id', 'glossary', 'validate_only', ), + 'create_glossary_category': ('parent', 'category_id', 'category', ), + 'create_glossary_term': ('parent', 'term_id', 'term', ), + 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), + 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), + 'create_partition': ('parent', 'partition', 'validate_only', ), + 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), + 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), + 'delete_aspect_type': ('name', 'etag', ), + 'delete_asset': ('name', ), + 'delete_content': ('name', ), + 'delete_data_attribute': ('name', 'etag', ), + 'delete_data_attribute_binding': ('name', 'etag', ), + 'delete_data_scan': ('name', 'force', ), + 'delete_data_taxonomy': ('name', 'etag', ), + 'delete_encryption_config': ('name', 'etag', ), + 'delete_entity': ('name', 'etag', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'etag', ), + 'delete_entry_link': ('name', ), + 'delete_entry_type': ('name', 'etag', ), + 'delete_environment': ('name', ), + 'delete_glossary': ('name', 'etag', ), + 'delete_glossary_category': ('name', ), + 'delete_glossary_term': ('name', ), + 'delete_lake': ('name', ), + 'delete_partition': ('name', 'etag', ), + 'delete_task': ('name', ), + 'delete_zone': ('name', ), + 'generate_data_quality_rules': ('name', ), + 'get_aspect_type': ('name', ), + 'get_asset': ('name', ), + 'get_content': ('name', 'view', ), + 'get_data_attribute': ('name', ), + 'get_data_attribute_binding': ('name', ), + 'get_data_scan': ('name', 'view', ), + 'get_data_scan_job': ('name', 'view', ), + 'get_data_taxonomy': ('name', 
), + 'get_encryption_config': ('name', ), + 'get_entity': ('name', 'view', ), + 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), + 'get_entry_group': ('name', ), + 'get_entry_link': ('name', ), + 'get_entry_type': ('name', ), + 'get_environment': ('name', ), + 'get_glossary': ('name', ), + 'get_glossary_category': ('name', ), + 'get_glossary_term': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_job': ('name', ), + 'get_lake': ('name', ), + 'get_metadata_job': ('name', ), + 'get_partition': ('name', ), + 'get_task': ('name', ), + 'get_zone': ('name', ), + 'list_aspect_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_asset_actions': ('parent', 'page_size', 'page_token', ), + 'list_assets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_content': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_data_attribute_bindings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_attributes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_scan_jobs': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_data_scans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_taxonomies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_encryption_configs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_entities': ('parent', 'view', 'page_size', 'page_token', 'filter', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_terms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_jobs': ('parent', 'page_size', 'page_token', ), + 'list_lake_actions': ('parent', 'page_size', 'page_token', ), + 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_zone_actions': ('parent', 'page_size', 'page_token', ), + 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'lookup_entry': ('name', 'entry', 'view', 'aspect_types', 'paths', ), + 'run_data_scan': ('name', ), + 'run_task': ('name', 'labels', 'args', ), + 'search_entries': ('name', 'query', 'page_size', 'page_token', 'order_by', 'scope', 'semantic_search', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_aspect_type': ('aspect_type', 'update_mask', 'validate_only', ), + 'update_asset': ('update_mask', 'asset', 'validate_only', ), + 'update_content': ('update_mask', 'content', 'validate_only', ), + 'update_data_attribute': ('update_mask', 'data_attribute', 'validate_only', ), + 'update_data_attribute_binding': ('update_mask', 'data_attribute_binding', 'validate_only', ), + 'update_data_scan': ('data_scan', 'update_mask', 
'validate_only', ), + 'update_data_taxonomy': ('update_mask', 'data_taxonomy', 'validate_only', ), + 'update_encryption_config': ('encryption_config', 'update_mask', ), + 'update_entity': ('entity', 'validate_only', ), + 'update_entry': ('entry', 'update_mask', 'allow_missing', 'delete_missing_aspects', 'aspect_keys', ), + 'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), + 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), + 'update_environment': ('update_mask', 'environment', 'validate_only', ), + 'update_glossary': ('glossary', 'update_mask', 'validate_only', ), + 'update_glossary_category': ('category', 'update_mask', ), + 'update_glossary_term': ('term', 'update_mask', ), + 'update_lake': ('update_mask', 'lake', 'validate_only', ), + 'update_task': ('update_mask', 'task', 'validate_only', ), + 'update_zone': ('update_mask', 'zone', 'validate_only', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataplexCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dataplex client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/setup.py b/owl-bot-staging/google-cloud-dataplex/v1/setup.py new file mode 100644 index 000000000000..df5e1552f733 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/setup.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
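The fixup script above is a command-line tool: it walks the input directory for .py files, runs each through the transformer, and writes fixed copies into the output directory, which must already exist and be empty (otherwise the script prints an error and exits). A plausible invocation, with placeholder directory names:

    python3 fixup_dataplex_v1_keywords.py \
        --input-directory ./my_old_sources \
        --output-directory ./my_fixed_sources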
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dataplex' + + +description = "Google Cloud Dataplex API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/dataplex/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..2010e549cceb --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..56affbd9bd75 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py new file mode 100644 index 000000000000..191773d5572d --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py new file mode 100644 index 000000000000..17416fe5528c --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py @@ -0,0 +1,14069 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.business_glossary_service import BusinessGlossaryServiceAsyncClient +from google.cloud.dataplex_v1.services.business_glossary_service import BusinessGlossaryServiceClient +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.services.business_glossary_service import transports +from google.cloud.dataplex_v1.types import business_glossary +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from 
google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(None) is None + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + BusinessGlossaryServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessGlossaryServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BusinessGlossaryServiceClient._get_client_cert_source(None, False) is None + assert BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', 
return_value=mock_default_cert_source): + assert BusinessGlossaryServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) +@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert BusinessGlossaryServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "always") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + assert BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert BusinessGlossaryServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert BusinessGlossaryServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert BusinessGlossaryServiceClient._get_universe_domain(None, None) == BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + BusinessGlossaryServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
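The assertions above pin down the universe-domain resolution order: an explicit client option wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn wins over the library default. A minimal sketch of exercising the first branch, using anonymous test credentials and a placeholder domain (not values taken from this patch):

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud.dataplex_v1.services.business_glossary_service import BusinessGlossaryServiceClient

    # An explicit universe_domain in client options takes precedence over
    # the environment variable and the default universe.
    options = client_options.ClientOptions(universe_domain="bar.com")
    client = BusinessGlossaryServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.universe_domain == "bar.com"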
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BusinessGlossaryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BusinessGlossaryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), +]) +def test_business_glossary_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.BusinessGlossaryServiceGrpcTransport, "grpc"), + (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BusinessGlossaryServiceRestTransport, "rest"), +]) +def test_business_glossary_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), +]) +def test_business_glossary_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + 
factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_business_glossary_service_client_get_transport_class(): + transport = BusinessGlossaryServiceClient.get_transport_class() + available_transports = [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceRestTransport, + ] + assert transport in available_transports + + transport = BusinessGlossaryServiceClient.get_transport_class("grpc") + assert transport == transports.BusinessGlossaryServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc"), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest"), +]) +@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) +@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +def test_business_glossary_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BusinessGlossaryServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BusinessGlossaryServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", "true"), + 
(BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", "false"), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", "true"), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) +@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_business_glossary_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
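+    # An "ADC client cert" here is one discovered through
+    # google.auth.transport.mtls.default_client_cert_source() (for example a
+    # device certificate), as opposed to a cert passed explicitly via
+    # ClientOptions.client_cert_source; both paths are mocked below.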
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient +]) +@mock.patch.object(BusinessGlossaryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BusinessGlossaryServiceClient)) +@mock.patch.object(BusinessGlossaryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BusinessGlossaryServiceAsyncClient)) +def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
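+    # get_mtls_endpoint_and_cert_source() returns an (endpoint, cert_source)
+    # tuple, with cert_source None whenever mTLS should not be used. Direct
+    # usage sketch (illustrative only):
+    #   endpoint, cert = client_class.get_mtls_endpoint_and_cert_source()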
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient +]) +@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) +@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +def test_business_glossary_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
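+    # Resolution precedence checked below, highest first: an explicit
+    # ClientOptions.api_endpoint, then GOOGLE_API_USE_MTLS_ENDPOINT, then the
+    # universe-domain endpoint template.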
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc"), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest"), +]) +def test_business_glossary_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
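+    # The dummy scopes "1" and "2" only prove pass-through to the transport; a
+    # realistic configuration would look like (illustrative only):
+    #   client_options.ClientOptions(
+    #       scopes=['https://www.googleapis.com/auth/cloud-platform'])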
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", grpc_helpers), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", None), +]) +def test_business_glossary_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_business_glossary_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = BusinessGlossaryServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", grpc_helpers), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_business_glossary_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
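+    # credentials_file is forwarded to the transport, which loads it with
+    # google.auth.load_credentials_from_file; the file name used below is a
+    # dummy value, e.g. (illustrative only):
+    #   options = client_options.ClientOptions(credentials_file="credentials.json")
+    #   client = BusinessGlossaryServiceClient(client_options=options)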
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    business_glossary.CreateGlossaryRequest,
+    dict,
+])
+def test_create_glossary(request_type, transport: str = 'grpc'):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_glossary(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.CreateGlossaryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_glossary_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = business_glossary.CreateGlossaryRequest(
+        parent='parent_value',
+        glossary_id='glossary_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
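+    # Only the .name attribute of the mocked return value matters below; the
+    # assertions inspect the outgoing request, not the operation result.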
+ with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryRequest( + parent='parent_value', + glossary_id='glossary_id_value', + ) + +def test_create_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + request = {} + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_glossary in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_glossary] = mock_rpc + + request = {} + await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_glossary_async_from_dict(): + await test_create_glossary_async(request_type=dict) + +def test_create_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
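+    # The routing header is a single metadata entry such as
+    # ('x-goog-request-params', 'parent=parent_value'), which servers use to
+    # route requests for location-bound resources.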
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_glossary_flattened():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_glossary(
+            parent='parent_value',
+            glossary=business_glossary.Glossary(name='name_value'),
+            glossary_id='glossary_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].glossary
+        mock_val = business_glossary.Glossary(name='name_value')
+        assert arg == mock_val
+        arg = args[0].glossary_id
+        mock_val = 'glossary_id_value'
+        assert arg == mock_val
+
+
+def test_create_glossary_flattened_error():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_glossary(
+            business_glossary.CreateGlossaryRequest(),
+            parent='parent_value',
+            glossary=business_glossary.Glossary(name='name_value'),
+            glossary_id='glossary_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_glossary_flattened_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_glossary(
+            parent='parent_value',
+            glossary=business_glossary.Glossary(name='name_value'),
+            glossary_id='glossary_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].glossary
+        mock_val = business_glossary.Glossary(name='name_value')
+        assert arg == mock_val
+        arg = args[0].glossary_id
+        mock_val = 'glossary_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_glossary_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
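+    # Flattened keyword arguments and a request object are mutually exclusive;
+    # callers pick one style (illustrative only):
+    #   await client.create_glossary(request=business_glossary.CreateGlossaryRequest(...))
+    #   # or
+    #   await client.create_glossary(parent=..., glossary=..., glossary_id=...)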
+ with pytest.raises(ValueError): + await client.create_glossary( + business_glossary.CreateGlossaryRequest(), + parent='parent_value', + glossary=business_glossary.Glossary(name='name_value'), + glossary_id='glossary_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.UpdateGlossaryRequest, + dict, +]) +def test_update_glossary(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryRequest( + ) + +def test_update_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc + request = {} + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_glossary in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_glossary] = mock_rpc + + request = {} + await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_glossary_async_from_dict(): + await test_update_glossary_async(request_type=dict) + +def test_update_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryRequest() + + request.glossary.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
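+    # For Update RPCs the routing key mirrors the nested field path, so the
+    # expected header value is 'glossary.name=name_value' rather than 'name=...'.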
+ with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'glossary.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryRequest() + + request.glossary.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'glossary.name=name_value', + ) in kw['metadata'] + + +def test_update_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_glossary( + glossary=business_glossary.Glossary(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].glossary + mock_val = business_glossary.Glossary(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_glossary_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary( + business_glossary.UpdateGlossaryRequest(), + glossary=business_glossary.Glossary(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_glossary_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_glossary(
+            glossary=business_glossary.Glossary(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].glossary
+        mock_val = business_glossary.Glossary(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_glossary_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_glossary(
+            business_glossary.UpdateGlossaryRequest(),
+            glossary=business_glossary.Glossary(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    business_glossary.DeleteGlossaryRequest,
+    dict,
+])
+def test_delete_glossary(request_type, transport: str = 'grpc'):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_glossary(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.DeleteGlossaryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_glossary_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = business_glossary.DeleteGlossaryRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_glossary),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+ client.delete_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + request = {} + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_glossary in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_glossary] = mock_rpc + + request = {} + await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_glossary_async_from_dict(): + await test_delete_glossary_async(request_type=dict) + +def test_delete_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_glossary( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_glossary_flattened_error():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_glossary(
+            business_glossary.DeleteGlossaryRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_glossary_flattened_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_glossary(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_glossary_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_glossary(
+            business_glossary.DeleteGlossaryRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    business_glossary.GetGlossaryRequest,
+    dict,
+])
+def test_get_glossary(request_type, transport: str = 'grpc'):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = business_glossary.Glossary(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            term_count=1088,
+            category_count=1510,
+            etag='etag_value',
+        )
+        response = client.get_glossary(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.GetGlossaryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
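+    # The arbitrary sample values above (term_count=1088, category_count=1510)
+    # are asserted back field by field to verify the response proto passes
+    # through the GAPIC layer unmodified.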
+ assert isinstance(response, business_glossary.Glossary) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.term_count == 1088 + assert response.category_count == 1510 + assert response.etag == 'etag_value' + + +def test_get_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.GetGlossaryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryRequest( + name='name_value', + ) + +def test_get_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + request = {} + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_glossary(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BusinessGlossaryServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_glossary in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_glossary] = mock_rpc
+
+        request = {}
+        await client.get_glossary(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_glossary(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryRequest):
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_glossary),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            term_count=1088,
+            category_count=1510,
+            etag='etag_value',
+        ))
+        response = await client.get_glossary(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.GetGlossaryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, business_glossary.Glossary)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.term_count == 1088
+    assert response.category_count == 1510
+    assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_glossary_async_from_dict():
+    await test_get_glossary_async(request_type=dict)
+
+def test_get_glossary_field_headers():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = business_glossary.GetGlossaryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary),
+ '__call__') as call:
+ call.return_value = business_glossary.Glossary()
+ client.get_glossary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_glossary_field_headers_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = business_glossary.GetGlossaryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary())
+ await client.get_glossary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_glossary_flattened():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = business_glossary.Glossary()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_glossary(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_glossary_flattened_error():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_glossary(
+ business_glossary.GetGlossaryRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_glossary_flattened_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_glossary( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_glossary_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_glossary( + business_glossary.GetGlossaryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.ListGlossariesRequest, + dict, +]) +def test_list_glossaries(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossariesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_glossaries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.ListGlossariesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_glossaries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossariesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_glossaries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossaries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + request = {} + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossaries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_glossaries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_glossaries] = mock_rpc + + request = {} + await client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossaries_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossariesRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + # Designate an appropriate return value for the call. 
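+ # Note: grpc_helpers_async.FakeUnaryUnaryCall stands in for a real grpc.aio
+ # unary-unary call: awaiting it yields the wrapped response. A rough mental
+ # model (illustrative sketch, not the actual implementation):
+ #
+ #     class FakeUnaryUnaryCall:
+ #         def __init__(self, response=None):
+ #             self._future = asyncio.get_event_loop().create_future()
+ #             self._future.set_result(response)
+ #         def __await__(self):
+ #             return self._future.__await__()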
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_glossaries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = business_glossary.ListGlossariesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListGlossariesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_glossaries_async_from_dict():
+ await test_list_glossaries_async(request_type=dict)
+
+def test_list_glossaries_field_headers():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = business_glossary.ListGlossariesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossaries),
+ '__call__') as call:
+ call.return_value = business_glossary.ListGlossariesResponse()
+ client.list_glossaries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_glossaries_field_headers_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = business_glossary.ListGlossariesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossaries),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse())
+ await client.list_glossaries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_glossaries_flattened():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossaries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = business_glossary.ListGlossariesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_glossaries(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_glossaries_flattened_error():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_glossaries(
+ business_glossary.ListGlossariesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_glossaries_flattened_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossaries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_glossaries(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_glossaries_flattened_error_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_glossaries(
+ business_glossary.ListGlossariesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_glossaries_pager(transport_name: str = "grpc"):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossaries),
+ '__call__') as call:
+ # Set the response to a series of pages.
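+ # How these fakes are consumed (sketch): each entry in mock.side_effect is
+ # returned by one underlying RPC invocation, and the pager keeps calling the
+ # RPC while the previous page carries a next_page_token, roughly:
+ #
+ #     while True:
+ #         page = rpc(request)
+ #         yield from page.glossaries
+ #         if not page.next_page_token:
+ #             break
+ #         request.page_token = page.next_page_token
+ #
+ # The trailing RuntimeError fails the test if a fifth, unexpected call is made.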
+ call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token='def', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_glossaries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.Glossary) + for i in results) +def test_list_glossaries_pages(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token='def', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossaries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_glossaries_async_pager(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token='def', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossaries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.Glossary) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossaries_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token='def', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + business_glossary.CreateGlossaryCategoryRequest, + dict, +]) +def test_create_glossary_category(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + response = client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +def test_create_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.CreateGlossaryCategoryRequest( + parent='parent_value', + category_id='category_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryCategoryRequest( + parent='parent_value', + category_id='category_id_value', + ) + +def test_create_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary_category] = mock_rpc + request = {} + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.create_glossary_category(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.create_glossary_category in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.create_glossary_category] = mock_rpc
+
+ request = {}
+ await client.create_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_glossary_category(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryCategoryRequest):
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory(
+ name='name_value',
+ uid='uid_value',
+ display_name='display_name_value',
+ description='description_value',
+ parent='parent_value',
+ ))
+ response = await client.create_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = business_glossary.CreateGlossaryCategoryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, business_glossary.GlossaryCategory)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_create_glossary_category_async_from_dict():
+ await test_create_glossary_category_async(request_type=dict)
+
+def test_create_glossary_category_field_headers():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = business_glossary.CreateGlossaryCategoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryCategoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + await client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_glossary_category( + parent='parent_value', + category=business_glossary.GlossaryCategory(name='name_value'), + category_id='category_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name='name_value') + assert arg == mock_val + arg = args[0].category_id + mock_val = 'category_id_value' + assert arg == mock_val + + +def test_create_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
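+ # The guard inside the generated method looks roughly like this
+ # (illustrative sketch of the generated shape):
+ #
+ #     flattened_params = [parent, category, category_id]
+ #     has_flattened_params = (
+ #         len([param for param in flattened_params if param is not None]) > 0
+ #     )
+ #     if request is not None and has_flattened_params:
+ #         raise ValueError("If the `request` argument is set, then none of "
+ #                          "the individual field arguments should be set.")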
+ with pytest.raises(ValueError):
+ client.create_glossary_category(
+ business_glossary.CreateGlossaryCategoryRequest(),
+ parent='parent_value',
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ category_id='category_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_glossary_category_flattened_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_glossary_category(
+ parent='parent_value',
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ category_id='category_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].category
+ mock_val = business_glossary.GlossaryCategory(name='name_value')
+ assert arg == mock_val
+ arg = args[0].category_id
+ mock_val = 'category_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_glossary_category_flattened_error_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_glossary_category(
+ business_glossary.CreateGlossaryCategoryRequest(),
+ parent='parent_value',
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ category_id='category_id_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ business_glossary.UpdateGlossaryCategoryRequest,
+ dict,
+])
+def test_update_glossary_category(request_type, transport: str = 'grpc'):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = business_glossary.GlossaryCategory(
+ name='name_value',
+ uid='uid_value',
+ display_name='display_name_value',
+ description='description_value',
+ parent='parent_value',
+ )
+ response = client.update_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = business_glossary.UpdateGlossaryCategoryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +def test_update_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryCategoryRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryCategoryRequest( + ) + +def test_update_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary_category] = mock_rpc + request = {} + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.update_glossary_category(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.update_glossary_category in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.update_glossary_category] = mock_rpc
+
+ request = {}
+ await client.update_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_glossary_category(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryCategoryRequest):
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory(
+ name='name_value',
+ uid='uid_value',
+ display_name='display_name_value',
+ description='description_value',
+ parent='parent_value',
+ ))
+ response = await client.update_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = business_glossary.UpdateGlossaryCategoryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, business_glossary.GlossaryCategory)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_update_glossary_category_async_from_dict():
+ await test_update_glossary_category_async(request_type=dict)
+
+def test_update_glossary_category_field_headers():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = business_glossary.UpdateGlossaryCategoryRequest() + + request.category.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'category.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryCategoryRequest() + + request.category.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + await client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'category.name=name_value', + ) in kw['metadata'] + + +def test_update_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_glossary_category( + category=business_glossary.GlossaryCategory(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.update_glossary_category(
+ business_glossary.UpdateGlossaryCategoryRequest(),
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_glossary_category_flattened_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_glossary_category(
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].category
+ mock_val = business_glossary.GlossaryCategory(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_glossary_category_flattened_error_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_glossary_category(
+ business_glossary.UpdateGlossaryCategoryRequest(),
+ category=business_glossary.GlossaryCategory(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ business_glossary.DeleteGlossaryCategoryRequest,
+ dict,
+])
+def test_delete_glossary_category(request_type, transport: str = 'grpc'):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_glossary_category(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = business_glossary.DeleteGlossaryCategoryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_glossary_category_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
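+ # Per AIP-4235, a request string field annotated as an auto-populated UUID4
+ # (for example a request_id; hypothetical here, since this request may not
+ # declare one) is filled in by the client when left unset, roughly:
+ #
+ #     if not request.request_id:
+ #         request.request_id = str(uuid.uuid4())
+ #
+ # so the test below only sets non-UUID4 string fields and verifies that they
+ # are sent through unchanged.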
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.DeleteGlossaryCategoryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryCategoryRequest( + name='name_value', + ) + +def test_delete_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary_category] = mock_rpc + request = {} + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_glossary_category in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_glossary_category] = mock_rpc + + request = {} + await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryCategoryRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_glossary_category_async_from_dict(): + await test_delete_glossary_category_async(request_type=dict) + +def test_delete_glossary_category_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryCategoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + call.return_value = None + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryCategoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_glossary_category_flattened():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_glossary_category(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_glossary_category_flattened_error():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_glossary_category(
+ business_glossary.DeleteGlossaryCategoryRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_glossary_category_flattened_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_glossary_category(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_glossary_category_flattened_error_async():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_glossary_category(
+ business_glossary.DeleteGlossaryCategoryRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ business_glossary.GetGlossaryCategoryRequest,
+ dict,
+])
+def test_get_glossary_category(request_type, transport: str = 'grpc'):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary_category),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + response = client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +def test_get_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.GetGlossaryCategoryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryCategoryRequest( + name='name_value', + ) + +def test_get_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_glossary_category] = mock_rpc + request = {} + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_glossary_category(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BusinessGlossaryServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_glossary_category in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_glossary_category] = mock_rpc
+
+        request = {}
+        await client.get_glossary_category(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_glossary_category(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryCategoryRequest):
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_glossary_category),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            parent='parent_value',
+        ))
+        response = await client.get_glossary_category(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.GetGlossaryCategoryRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, business_glossary.GlossaryCategory)
+        assert response.name == 'name_value'
+        assert response.uid == 'uid_value'
+        assert response.display_name == 'display_name_value'
+        assert response.description == 'description_value'
+        assert response.parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_get_glossary_category_async_from_dict():
+    await test_get_glossary_category_async(request_type=dict)
+
+def test_get_glossary_category_field_headers():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
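+    # Note: GAPIC clients mirror URI path parameters into the
+    # x-goog-request-params metadata entry so the backend can route the
+    # request; the assertion at the end of this test checks that header.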
+ request = business_glossary.GetGlossaryCategoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryCategoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + await client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_glossary_category( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary_category( + business_glossary.GetGlossaryCategoryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_glossary_category_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. 
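+        # Note: FakeUnaryUnaryCall wraps the value in an awaitable
+        # stand-in for a grpc.aio call object, which is what the async
+        # transport expects to receive from the stub.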
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_glossary_category(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_glossary_category_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_glossary_category(
+            business_glossary.GetGlossaryCategoryRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  business_glossary.ListGlossaryCategoriesRequest,
+  dict,
+])
+def test_list_glossary_categories(request_type, transport: str = 'grpc'):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = business_glossary.ListGlossaryCategoriesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_glossary_categories(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.ListGlossaryCategoriesRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListGlossaryCategoriesPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_glossary_categories_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = business_glossary.ListGlossaryCategoriesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+ client.list_glossary_categories(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossaryCategoriesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_glossary_categories_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossary_categories in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossary_categories] = mock_rpc + request = {} + client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_glossary_categories in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_glossary_categories] = mock_rpc + + request = {} + await client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossary_categories_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossaryCategoriesRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + '__call__') as call: + # Designate an appropriate return value for the call. 
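+        # Note: the client wraps this raw response in an AsyncPager;
+        # scalar fields such as next_page_token are proxied through to
+        # the underlying response object.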
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_glossary_categories(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.ListGlossaryCategoriesRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListGlossaryCategoriesAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_glossary_categories_async_from_dict():
+    await test_list_glossary_categories_async(request_type=dict)
+
+def test_list_glossary_categories_field_headers():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = business_glossary.ListGlossaryCategoriesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        call.return_value = business_glossary.ListGlossaryCategoriesResponse()
+        client.list_glossary_categories(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_glossary_categories_field_headers_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = business_glossary.ListGlossaryCategoriesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse())
+        await client.list_glossary_categories(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+def test_list_glossary_categories_flattened():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = business_glossary.ListGlossaryCategoriesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_glossary_categories(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_glossary_categories_flattened_error():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_glossary_categories(
+            business_glossary.ListGlossaryCategoriesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_glossary_categories_flattened_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_glossary_categories(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_glossary_categories_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_glossary_categories(
+            business_glossary.ListGlossaryCategoriesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_glossary_categories_pager(transport_name: str = "grpc"):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        # Set the response to a series of pages.
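+        # Note: each response below is one page. The pager re-invokes the
+        # RPC with the previous next_page_token until it sees an empty
+        # token; the trailing RuntimeError is a sentinel that fails the
+        # test if the pager issues one call too many.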
+        call.side_effect = (
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                ],
+                next_page_token='abc',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[],
+                next_page_token='def',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                ],
+                next_page_token='ghi',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_glossary_categories(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, business_glossary.GlossaryCategory)
+                   for i in results)
+
+
+def test_list_glossary_categories_pages(transport_name: str = "grpc"):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                ],
+                next_page_token='abc',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[],
+                next_page_token='def',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                ],
+                next_page_token='ghi',
+            ),
+            business_glossary.ListGlossaryCategoriesResponse(
+                categories=[
+                    business_glossary.GlossaryCategory(),
+                    business_glossary.GlossaryCategory(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_glossary_categories(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_glossary_categories_async_pager():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_categories),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
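+        # Note: same four-page sequence as the sync pager test above; the
+        # async pager is obtained by awaiting the client call and is then
+        # consumed with `async for`.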
+ call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token='def', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossary_categories(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.GlossaryCategory) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token='def', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossary_categories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + business_glossary.CreateGlossaryTermRequest, + dict, +]) +def test_create_glossary_term(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + response = client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +def test_create_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.CreateGlossaryTermRequest( + parent='parent_value', + term_id='term_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryTermRequest( + parent='parent_value', + term_id='term_id_value', + ) + +def test_create_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary_term] = mock_rpc + request = {} + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.create_glossary_term(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BusinessGlossaryServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_glossary_term in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_glossary_term] = mock_rpc
+
+        request = {}
+        await client.create_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_glossary_term(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryTermRequest):
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            parent='parent_value',
+        ))
+        response = await client.create_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.CreateGlossaryTermRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, business_glossary.GlossaryTerm)
+        assert response.name == 'name_value'
+        assert response.uid == 'uid_value'
+        assert response.display_name == 'display_name_value'
+        assert response.description == 'description_value'
+        assert response.parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_create_glossary_term_async_from_dict():
+    await test_create_glossary_term_async(request_type=dict)
+
+def test_create_glossary_term_field_headers():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = business_glossary.CreateGlossaryTermRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + call.return_value = business_glossary.GlossaryTerm() + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryTermRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + await client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_glossary_term( + parent='parent_value', + term=business_glossary.GlossaryTerm(name='name_value'), + term_id='term_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name='name_value') + assert arg == mock_val + arg = args[0].term_id + mock_val = 'term_id_value' + assert arg == mock_val + + +def test_create_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary_term( + business_glossary.CreateGlossaryTermRequest(), + parent='parent_value', + term=business_glossary.GlossaryTerm(name='name_value'), + term_id='term_id_value', + ) + +@pytest.mark.asyncio +async def test_create_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_glossary_term(
+            parent='parent_value',
+            term=business_glossary.GlossaryTerm(name='name_value'),
+            term_id='term_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].term
+        mock_val = business_glossary.GlossaryTerm(name='name_value')
+        assert arg == mock_val
+        arg = args[0].term_id
+        mock_val = 'term_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_glossary_term_flattened_error_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_glossary_term(
+            business_glossary.CreateGlossaryTermRequest(),
+            parent='parent_value',
+            term=business_glossary.GlossaryTerm(name='name_value'),
+            term_id='term_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  business_glossary.UpdateGlossaryTermRequest,
+  dict,
+])
+def test_update_glossary_term(request_type, transport: str = 'grpc'):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = business_glossary.GlossaryTerm(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            parent='parent_value',
+        )
+        response = client.update_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.UpdateGlossaryTermRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, business_glossary.GlossaryTerm)
+        assert response.name == 'name_value'
+        assert response.uid == 'uid_value'
+        assert response.display_name == 'display_name_value'
+        assert response.description == 'description_value'
+        assert response.parent == 'parent_value'
+
+
+def test_update_glossary_term_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryTermRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryTermRequest( + ) + +def test_update_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary_term] = mock_rpc + request = {} + client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_glossary_term in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_glossary_term] = mock_rpc + + request = {} + await client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
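+        # Note: the wrapped RPC was built once by _prep_wrapped_messages
+        # at client construction and is looked up from _wrapped_methods on
+        # every call, which is why wrapper_fn records no further calls.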
+        assert mock_rpc.call_count == 1
+
+        await client.update_glossary_term(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryTermRequest):
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm(
+            name='name_value',
+            uid='uid_value',
+            display_name='display_name_value',
+            description='description_value',
+            parent='parent_value',
+        ))
+        response = await client.update_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = business_glossary.UpdateGlossaryTermRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, business_glossary.GlossaryTerm)
+        assert response.name == 'name_value'
+        assert response.uid == 'uid_value'
+        assert response.display_name == 'display_name_value'
+        assert response.description == 'description_value'
+        assert response.parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_update_glossary_term_async_from_dict():
+    await test_update_glossary_term_async(request_type=dict)
+
+def test_update_glossary_term_field_headers():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = business_glossary.UpdateGlossaryTermRequest()
+
+    request.term.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        call.return_value = business_glossary.GlossaryTerm()
+        client.update_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'term.name=name_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_glossary_term_field_headers_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = business_glossary.UpdateGlossaryTermRequest()
+
+    request.term.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm())
+        await client.update_glossary_term(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'term.name=name_value',
+        ) in kw['metadata']
+
+
+def test_update_glossary_term_flattened():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = business_glossary.GlossaryTerm()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_glossary_term(
+            term=business_glossary.GlossaryTerm(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].term
+        mock_val = business_glossary.GlossaryTerm(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_glossary_term_flattened_error():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_glossary_term(
+            business_glossary.UpdateGlossaryTermRequest(),
+            term=business_glossary.GlossaryTerm(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_glossary_term_flattened_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_glossary_term),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_glossary_term(
+            term=business_glossary.GlossaryTerm(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_glossary_term( + business_glossary.UpdateGlossaryTermRequest(), + term=business_glossary.GlossaryTerm(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.DeleteGlossaryTermRequest, + dict, +]) +def test_delete_glossary_term(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.DeleteGlossaryTermRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
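+        # Note: setting a string `name` on the mock only keeps shared
+        # helper code happy (per the comment above, compute-style clients
+        # read operation.name); it does not affect the request assertion.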
+ client.delete_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryTermRequest( + name='name_value', + ) + +def test_delete_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary_term] = mock_rpc + request = {} + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_glossary_term in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_glossary_term] = mock_rpc + + request = {} + await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryTermRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_glossary_term_async_from_dict(): + await test_delete_glossary_term_async(request_type=dict) + +def test_delete_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryTermRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + call.return_value = None + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryTermRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_glossary_term( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
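+    # Note: flattened keyword arguments are convenience sugar that the
+    # client copies into a fresh request object, so combining them with an
+    # explicit request object is ambiguous and rejected with ValueError.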
+ with pytest.raises(ValueError): + client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_glossary_term( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.GetGlossaryTermRequest, + dict, +]) +def test_get_glossary_term(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + response = client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +def test_get_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235.
+ request = business_glossary.GetGlossaryTermRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryTermRequest( + name='name_value', + ) + +def test_get_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_glossary_term] = mock_rpc + request = {} + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_glossary_term in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_glossary_term] = mock_rpc + + request = {} + await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryTermRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
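+ # (Patching '__call__' on type(client.transport.get_glossary_term) replaces + # the underlying gRPC stub method itself, so the client code path above it, + # validation, routing headers and wrapping, still runs unmodified.)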
+ with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + response = await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_get_glossary_term_async_from_dict(): + await test_get_glossary_term_async(request_type=dict) + +def test_get_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryTermRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + call.return_value = business_glossary.GlossaryTerm() + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryTermRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call.
+ call.return_value = business_glossary.GlossaryTerm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_glossary_term( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_glossary_term( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.ListGlossaryTermsRequest, + dict, +]) +def test_list_glossary_terms(request_type, transport: str = 'grpc'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryTermsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryTermsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect.
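+ # (List methods return a pager rather than the raw response: the pager + # proxies attribute access such as next_page_token and unreachable_locations + # to the first response and fetches subsequent pages lazily on iteration.)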
+ assert isinstance(response, pagers.ListGlossaryTermsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_glossary_terms_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.ListGlossaryTermsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_glossary_terms(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossaryTermsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_glossary_terms_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossary_terms in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossary_terms] = mock_rpc + request = {} + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called.
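+ # (_wrapped_methods is a plain dict keyed by the bound transport method, + # so swapping in mock_rpc above intercepts every call that follows.)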
+ assert mock_rpc.call_count == 1 + + client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_glossary_terms in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_glossary_terms] = mock_rpc + + request = {} + await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_glossary_terms_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossaryTermsRequest): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + response = await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryTermsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossaryTermsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_from_dict(): + await test_list_glossary_terms_async(request_type=dict) + +def test_list_glossary_terms_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryTermsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request.
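+ # (The assertions at the end of this test check that request.parent is + # mirrored into the 'x-goog-request-params' routing header, which is how + # the backend routes location-scoped requests.)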
+ with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + call.return_value = business_glossary.ListGlossaryTermsResponse() + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_glossary_terms_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryTermsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse()) + await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_glossary_terms_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryTermsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_glossary_terms( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_glossary_terms_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_glossary_terms_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method.
+ response = await client.list_glossary_terms( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_glossary_terms_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent='parent_value', + ) + + +def test_list_glossary_terms_pager(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token='def', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_glossary_terms(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) + for i in results) + + +def test_list_glossary_terms_pages(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + # Set the response to a series of pages.
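+ # (mock's side_effect hands out one element per call, so each RPC issued + # by the pager consumes the next response in order; iteration ends at the + # final response, whose next_page_token is left unset.)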
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token='def', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossary_terms(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_pager(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token='def', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossary_terms(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
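+ # (The trailing RuntimeError is a sentinel: it would only be raised if the + # pager requested a page beyond the final, token-less response.)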
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token='def', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossary_terms(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + + request = {} + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
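+ # (create_glossary is a long-running operation: its first invocation also + # builds and caches an operations polling wrapper, which is why wrapper_fn + # is reset again below before the second call is counted.)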
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_rest_required_fields(request_type=business_glossary.CreateGlossaryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["glossary_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "glossaryId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "glossaryId" in jsonified_request + assert jsonified_request["glossaryId"] == request_init["glossary_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["glossaryId"] = 'glossary_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("glossary_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "glossaryId" in jsonified_request + assert jsonified_request["glossaryId"] == 'glossary_id_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
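+ # (transcode() normally maps the request onto an http rule, producing a + # dict with 'uri', 'method', 'query_params' and, for methods that take a + # body, a 'body' entry; the stub below mimics that shape with a + # placeholder uri.)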
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary(request) + + expected_params = [ + ( + "glossaryId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(("glossaryId", "validateOnly", )) & set(("parent", "glossaryId", "glossary", ))) + + +def test_create_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + glossary=business_glossary.Glossary(name='name_value'), + glossary_id='glossary_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, args[1]) + + +def test_create_glossary_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary( + business_glossary.CreateGlossaryRequest(), + parent='parent_value', + glossary=business_glossary.Glossary(name='name_value'), + glossary_id='glossary_id_value', + ) + + +def test_update_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc + + request = {} + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_rest_required_fields(request_type=business_glossary.UpdateGlossaryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("glossary", "updateMask", ))) + + +def test_update_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + glossary=business_glossary.Glossary(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{glossary.name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + + +def test_update_glossary_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary( + business_glossary.UpdateGlossaryRequest(), + glossary=business_glossary.Glossary(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + + request = {} + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_rest_required_fields(request_type=business_glossary.DeleteGlossaryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + + +def test_delete_glossary_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary( + business_glossary.DeleteGlossaryRequest(), + name='name_value', + ) + + +def test_get_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + + request = {} + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_rest_required_fields(request_type=business_glossary.GetGlossaryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.Glossary() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.Glossary() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + + +def test_get_glossary_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary( + business_glossary.GetGlossaryRequest(), + name='name_value', + ) + + +def test_list_glossaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossaries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + + request = {} + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossaries_rest_required_fields(request_type=business_glossary.ListGlossariesRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossariesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossaries(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_glossaries_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_glossaries._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_glossaries_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, args[1]) + + +def test_list_glossaries_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossaries( + business_glossary.ListGlossariesRequest(), + parent='parent_value', + ) + + +def test_list_glossaries_rest_pager(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req:
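+ # Four fake pages follow: next_page_token values 'abc', 'def', and 'ghi' chain the pages, and the final page's empty token ends iteration.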
+ # Set the response as a series of pages + response = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token='def', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(business_glossary.ListGlossariesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_glossaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.Glossary) + for i in results) + + pages = list(client.list_glossaries(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary_category] = mock_rpc + + request = {} + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.create_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_category_rest_required_fields(request_type=business_glossary.CreateGlossaryCategoryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["category_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "categoryId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "categoryId" in jsonified_request + assert jsonified_request["categoryId"] == request_init["category_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["categoryId"] = 'category_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_category._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("category_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "categoryId" in jsonified_request + assert jsonified_request["categoryId"] == 'category_id_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
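+ # Unlike the GET methods above, this create method sends a body, so the stubbed transcode result carries the request message as 'body' as well.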
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary_category(request) + + expected_params = [ + ( + "categoryId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(("categoryId", )) & set(("parent", "categoryId", "category", ))) + + +def test_create_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + category=business_glossary.GlossaryCategory(name='name_value'), + category_id='category_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" % client.transport._host, args[1]) + + +def test_create_glossary_category_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary_category( + business_glossary.CreateGlossaryCategoryRequest(), + parent='parent_value', + category=business_glossary.GlossaryCategory(name='name_value'), + category_id='category_id_value', + ) + + +def test_update_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary_category] = mock_rpc + + request = {} + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_category_rest_required_fields(request_type=business_glossary.UpdateGlossaryCategoryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_category._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
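+ # The update method is transcoded to PATCH with the message as the request body.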
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary_category(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("category", "updateMask", ))) + + +def test_update_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + category=business_glossary.GlossaryCategory(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + + +def test_update_glossary_category_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary_category( + business_glossary.UpdateGlossaryCategoryRequest(), + category=business_glossary.GlossaryCategory(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary_category] = mock_rpc + + request = {} + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_category_rest_required_fields(request_type=business_glossary.DeleteGlossaryCategoryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
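+ # Delete sends no body, and the faked response is empty because the RPC returns google.protobuf.Empty.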
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary_category(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + + +def test_delete_glossary_category_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary_category( + business_glossary.DeleteGlossaryCategoryRequest(), + name='name_value', + ) + + +def test_get_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_category in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_glossary_category] = mock_rpc + + request = {} + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_category_rest_required_fields(request_type=business_glossary.GetGlossaryCategoryRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary_category(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + + +def test_get_glossary_category_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary_category( + business_glossary.GetGlossaryCategoryRequest(), + name='name_value', + ) + + +def test_list_glossary_categories_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossary_categories in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossary_categories] = mock_rpc + + request = {} + client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossary_categories_rest_required_fields(request_type=business_glossary.ListGlossaryCategoriesRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_categories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_categories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossary_categories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_glossary_categories_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_glossary_categories._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_glossary_categories_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossary_categories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" % client.transport._host, args[1]) + + +def test_list_glossary_categories_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossary_categories( + business_glossary.ListGlossaryCategoriesRequest(), + parent='parent_value', + ) + + +def test_list_glossary_categories_rest_pager(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
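+ # As in the glossaries pager test above, fake a sequence of paged HTTP responses.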
+ with mock.patch.object(Session, 'request') as req: + # Set the response as a series of pages + response = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token='def', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(business_glossary.ListGlossaryCategoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + pager = client.list_glossary_categories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryCategory) + for i in results) + + pages = list(client.list_glossary_categories(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_glossary_term] = mock_rpc + + request = {} + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.create_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_term_rest_required_fields(request_type=business_glossary.CreateGlossaryTermRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["term_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "termId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "termId" in jsonified_request + assert jsonified_request["termId"] == request_init["term_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["termId"] = 'term_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_term._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("term_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "termId" in jsonified_request + assert jsonified_request["termId"] == 'term_id_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
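+ # Like the category create above, this create method sends the request message as the HTTP body.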
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary_term(request) + + expected_params = [ + ( + "termId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(("termId", )) & set(("parent", "termId", "term", ))) + + +def test_create_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + term=business_glossary.GlossaryTerm(name='name_value'), + term_id='term_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" % client.transport._host, args[1]) + + +def test_create_glossary_term_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary_term( + business_glossary.CreateGlossaryTermRequest(), + parent='parent_value', + term=business_glossary.GlossaryTerm(name='name_value'), + term_id='term_id_value', + ) + + +def test_update_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_glossary_term] = mock_rpc + + request = {} + client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_term_rest_required_fields(request_type=business_glossary.UpdateGlossaryTermRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_term._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
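+ # The term update is likewise transcoded to PATCH with the message as the request body.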
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary_term(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("term", "updateMask", ))) + + +def test_update_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + term=business_glossary.GlossaryTerm(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + + +def test_update_glossary_term_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary_term( + business_glossary.UpdateGlossaryTermRequest(), + term=business_glossary.GlossaryTerm(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_glossary_term] = mock_rpc + + request = {} + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_term_rest_required_fields(request_type=business_glossary.DeleteGlossaryTermRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary_term(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + + +def test_delete_glossary_term_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name='name_value', + ) + + +def test_get_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.get_glossary_term] = mock_rpc + + request = {} + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_term_rest_required_fields(request_type=business_glossary.GetGlossaryTermRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
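+            # The stub returns the same dict shape as the real transcode() helper;
+            # the transport only consumes 'uri', 'method', and 'query_params' here.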
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary_term(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + + +def test_get_glossary_term_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name='name_value', + ) + + +def test_list_glossary_terms_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossary_terms in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_glossary_terms] = mock_rpc + + request = {} + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossary_terms_rest_required_fields(request_type=business_glossary.ListGlossaryTermsRequest): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_terms._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_terms._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossary_terms(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_glossary_terms_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_glossary_terms._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_glossary_terms_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossary_terms(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" % client.transport._host, args[1]) + + +def test_list_glossary_terms_rest_flattened_error(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent='parent_value', + ) + + +def test_list_glossary_terms_rest_pager(transport: str = 'rest'): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
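+        # Paging is driven entirely by the next_page_token values in the canned
+        # responses below; the empty token on the final page stops iteration.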
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token='abc', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token='def', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token='ghi', + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(business_glossary.ListGlossaryTermsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + + pager = client.list_glossary_terms(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) + for i in results) + + pages = list(client.list_glossary_terms(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BusinessGlossaryServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + transports.BusinessGlossaryServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = BusinessGlossaryServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
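+    # Delete is a long-running RPC, so the stub responds with an Operation.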
+ with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary), + '__call__') as call: + call.return_value = business_glossary.Glossary() + client.get_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossaries_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + call.return_value = business_glossary.ListGlossariesResponse() + client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. 
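+        # Even with request=None, the client must synthesize a default request message.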
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + call.return_value = None + client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + call.return_value = business_glossary.GlossaryCategory() + client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossary_categories_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + '__call__') as call: + call.return_value = business_glossary.ListGlossaryCategoriesResponse() + client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + call.return_value = business_glossary.GlossaryTerm() + client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), + '__call__') as call: + call.return_value = business_glossary.GlossaryTerm() + client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + call.return_value = None + client.delete_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + call.return_value = business_glossary.GlossaryTerm() + client.get_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossary_terms_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + '__call__') as call: + call.return_value = business_glossary.ListGlossaryTermsResponse() + client.list_glossary_terms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryTermsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BusinessGlossaryServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
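+# The asyncio variants below exercise the same checks but await the call.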
+@pytest.mark.asyncio +async def test_create_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + term_count=1088, + category_count=1510, + etag='etag_value', + )) + await client.get_glossary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_glossaries_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_glossary_categories_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. 
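+        # FakeUnaryUnaryCall wraps the canned message so awaiting the call
+        # yields it, like a real unary-unary gRPC call.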
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + )) + await client.get_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
+# request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_glossary_terms_empty_call_grpc_asyncio():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_glossary_terms),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        await client.list_glossary_terms(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = business_glossary.ListGlossaryTermsRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+    transport = BusinessGlossaryServiceClient.get_transport_class("rest")(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert transport.kind == "rest"
+
+
+def test_create_glossary_rest_bad_request(request_type=business_glossary.CreateGlossaryRequest):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_glossary(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    business_glossary.CreateGlossaryRequest,
+    dict,
+])
+def test_create_glossary_rest_call_success(request_type):
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["glossary"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'term_count': 1088, 'category_count': 1510, 'etag': 'etag_value'}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = business_glossary.CreateGlossaryRequest.meta.fields["glossary"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
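+        # proto-plus messages expose fields via .meta.fields, raw protobuf
+        # messages via .DESCRIPTOR.fields; both cases are handled below.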
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_glossary(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.CreateGlossaryRequest.pb(business_glossary.CreateGlossaryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.CreateGlossaryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_glossary_rest_bad_request(request_type=business_glossary.UpdateGlossaryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
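+    # A 400 status and an empty JSON body are enough for the transport to raise BadRequest.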
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_glossary(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.UpdateGlossaryRequest, + dict, +]) +def test_update_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} + request_init["glossary"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'term_count': 1088, 'category_count': 1510, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.UpdateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.UpdateGlossaryRequest.pb(business_glossary.UpdateGlossaryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_rest_bad_request(request_type=business_glossary.DeleteGlossaryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.DeleteGlossaryRequest, + dict, +]) +def test_delete_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
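+    # The REST path reports the same long-running Operation that gRPC returns.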
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.DeleteGlossaryRequest.pb(business_glossary.DeleteGlossaryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.DeleteGlossaryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_glossary_rest_bad_request(request_type=business_glossary.GetGlossaryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
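+ # (The REST transport translates the mocked 400 status below into
+ # core_exceptions.BadRequest, which pytest.raises expects.)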
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.GetGlossaryRequest, + dict, +]) +def test_get_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.Glossary( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + term_count=1088, + category_count=1510, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary(request) + + # Establish that the response is the type that we expect. 
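+ # (Each scalar field set on the fake Glossary above should survive the
+ # proto-plus/JSON round trip, which the assertions below verify.)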
+ assert isinstance(response, business_glossary.Glossary) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.term_count == 1088 + assert response.category_count == 1510 + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.GetGlossaryRequest.pb(business_glossary.GetGlossaryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.Glossary.to_json(business_glossary.Glossary()) + req.return_value.content = return_value + + request = business_glossary.GetGlossaryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.Glossary() + post_with_metadata.return_value = business_glossary.Glossary(), metadata + + client.get_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_glossaries_rest_bad_request(request_type=business_glossary.ListGlossariesRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossaries(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.ListGlossariesRequest, + dict, +]) +def test_list_glossaries_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossariesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossaries(request) + + # Establish that the response is the type that we expect. 
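+ # (list_glossaries wraps the raw ListGlossariesResponse in a pager, so the
+ # page token and unreachable locations are asserted through the pager.)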
+ assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossaries_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossaries") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.ListGlossariesRequest.pb(business_glossary.ListGlossariesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.ListGlossariesResponse.to_json(business_glossary.ListGlossariesResponse()) + req.return_value.content = return_value + + request = business_glossary.ListGlossariesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.ListGlossariesResponse() + post_with_metadata.return_value = business_glossary.ListGlossariesResponse(), metadata + + client.list_glossaries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_glossary_category_rest_bad_request(request_type=business_glossary.CreateGlossaryCategoryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_glossary_category(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.CreateGlossaryCategoryRequest, + dict, +]) +def test_create_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init["category"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.CreateGlossaryCategoryRequest.meta.fields["category"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
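+ # (For example, the 'create_time' entry in request_init above is a
+ # google.protobuf.Timestamp, so this helper would return its 'seconds'
+ # and 'nanos' fields.)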
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["category"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["category"][field])): + del request_init["category"][field][i][subfield] + else: + del request_init["category"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_glossary_category(request) + + # Establish that the response is the type that we expect. 
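+ # (Unlike the glossary create/update/delete calls above, which return
+ # long-running Operations, create_glossary_category returns the
+ # GlossaryCategory resource directly.)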
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_category") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_category_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_category") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.CreateGlossaryCategoryRequest.pb(business_glossary.CreateGlossaryCategoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + req.return_value.content = return_value + + request = business_glossary.CreateGlossaryCategoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.create_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_glossary_category_rest_bad_request(request_type=business_glossary.UpdateGlossaryCategoryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
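+ # (For update requests the resource path comes from category.name in
+ # request_init above, which REST transcoding expands into the request URL;
+ # the mocked 400 below then surfaces as BadRequest.)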
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_glossary_category(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.UpdateGlossaryCategoryRequest, + dict, +]) +def test_update_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} + request_init["category"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.UpdateGlossaryCategoryRequest.meta.fields["category"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["category"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["category"][field])): + del request_init["category"][field][i][subfield] + else: + del request_init["category"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary_category(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_category") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_category_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_category") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.UpdateGlossaryCategoryRequest.pb(business_glossary.UpdateGlossaryCategoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryCategoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.update_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_category_rest_bad_request(request_type=business_glossary.DeleteGlossaryCategoryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
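+ # (Category deletion is synchronous with an empty response body: the
+ # success test below asserts None, and its interceptor test wires only
+ # the `pre` hook since there is no response to post-process.)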
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary_category(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.DeleteGlossaryCategoryRequest, + dict, +]) +def test_delete_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary_category(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_category") as pre: + pre.assert_not_called() + pb_message = business_glossary.DeleteGlossaryCategoryRequest.pb(business_glossary.DeleteGlossaryCategoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = business_glossary.DeleteGlossaryCategoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_glossary_category_rest_bad_request(request_type=business_glossary.GetGlossaryCategoryRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary_category(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.GetGlossaryCategoryRequest, + dict, +]) +def test_get_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary_category(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_category") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.GetGlossaryCategoryRequest.pb(business_glossary.GetGlossaryCategoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + req.return_value.content = return_value + + request = business_glossary.GetGlossaryCategoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.get_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_glossary_categories_rest_bad_request(request_type=business_glossary.ListGlossaryCategoriesRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossary_categories(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.ListGlossaryCategoriesRequest, + dict, +]) +def test_list_glossary_categories_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossary_categories(request) + + # Establish that the response is the type that we expect. 
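+ # (As with ListGlossaries, the response is exposed through a pager;
+ # unreachable_locations presumably lists locations the service could not
+ # reach while listing, mirroring other Google Cloud list APIs.)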
+ assert isinstance(response, pagers.ListGlossaryCategoriesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossary_categories_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_categories") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_categories_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_categories") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.ListGlossaryCategoriesRequest.pb(business_glossary.ListGlossaryCategoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.ListGlossaryCategoriesResponse.to_json(business_glossary.ListGlossaryCategoriesResponse()) + req.return_value.content = return_value + + request = business_glossary.ListGlossaryCategoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.ListGlossaryCategoriesResponse() + post_with_metadata.return_value = business_glossary.ListGlossaryCategoriesResponse(), metadata + + client.list_glossary_categories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_glossary_term_rest_bad_request(request_type=business_glossary.CreateGlossaryTermRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.create_glossary_term(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ business_glossary.CreateGlossaryTermRequest,
+ dict,
+])
+def test_create_glossary_term_rest_call_success(request_type):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'}
+ request_init["term"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'}
+ # The version of a generated dependency at test runtime may differ from the version used during generation.
+ # Delete any fields which are not present in the current runtime dependency
+ # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+ # Determine if the message type is proto-plus or protobuf
+ test_field = business_glossary.CreateGlossaryTermRequest.meta.fields["term"]
+
+ def get_message_fields(field):
+ # Given a field which is a message (composite type), return a list with
+ # all the fields of the message.
+ # If the field is not a composite type, return an empty list.
+ message_fields = []
+
+ if hasattr(field, "message") and field.message:
+ is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+ if is_field_type_proto_plus_type:
+ message_fields = field.message.meta.fields.values()
+ # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+ else: # pragma: NO COVER
+ message_fields = field.message.DESCRIPTOR.fields
+ return message_fields
+
+ runtime_nested_fields = [
+ (field.name, nested_field.name)
+ for field in get_message_fields(test_field)
+ for nested_field in get_message_fields(field)
+ ]
+
+ subfields_not_in_runtime = []
+
+ # For each item in the sample request, create a list of sub fields which are not present at runtime
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for field, value in request_init["term"].items(): # pragma: NO COVER
+ result = None
+ is_repeated = False
+ # For repeated fields
+ if isinstance(value, list) and len(value):
+ is_repeated = True
+ result = value[0]
+ # For fields where the type is another message
+ if isinstance(value, dict):
+ result = value
+
+ if result and hasattr(result, "keys"):
+ for subfield in result.keys():
+ if (field, subfield) not in runtime_nested_fields:
+ subfields_not_in_runtime.append(
+ {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+ )
+
+ # Remove fields from the sample request which are not present in the runtime version of the dependency
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+ field = subfield_to_delete.get("field")
+ field_repeated = subfield_to_delete.get("is_repeated")
+ subfield = subfield_to_delete.get("subfield")
+ if subfield:
+ if field_repeated:
+ for i in range(0, len(request_init["term"][field])):
+ del request_init["term"][field][i][subfield]
+ else:
+ del request_init["term"][field][subfield]
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = business_glossary.GlossaryTerm(
+ name='name_value',
+ uid='uid_value',
+ display_name='display_name_value',
+ description='description_value',
+ parent='parent_value',
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = business_glossary.GlossaryTerm.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.create_glossary_term(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, business_glossary.GlossaryTerm)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.parent == 'parent_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_glossary_term_rest_interceptors(null_interceptor):
+ transport = transports.BusinessGlossaryServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(),
+ )
+ client = BusinessGlossaryServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term") as post, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_term") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = business_glossary.CreateGlossaryTermRequest.pb(business_glossary.CreateGlossaryTermRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm())
+ req.return_value.content = return_value
+
+ request = business_glossary.CreateGlossaryTermRequest()
+ metadata =[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = business_glossary.GlossaryTerm()
+ post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata
+
+ client.create_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_glossary_term_rest_bad_request(request_type=business_glossary.UpdateGlossaryTermRequest):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.update_glossary_term(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ business_glossary.UpdateGlossaryTermRequest,
+ dict,
+])
+def test_update_glossary_term_rest_call_success(request_type):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}}
+ request_init["term"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'}
+ # The version of a generated dependency at test runtime may differ from the version used during generation.
+ # Delete any fields which are not present in the current runtime dependency
+ # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+ # Determine if the message type is proto-plus or protobuf
+ test_field = business_glossary.UpdateGlossaryTermRequest.meta.fields["term"]
+
+ def get_message_fields(field):
+ # Given a field which is a message (composite type), return a list with
+ # all the fields of the message.
+ # If the field is not a composite type, return an empty list.
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["term"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["term"][field])): + del request_init["term"][field][i][subfield] + else: + del request_init["term"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary_term(request) + + # Establish that the response is the type that we expect. 
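+ # (GlossaryTerm exposes the same scalar surface as GlossaryCategory, so
+ # the assertions below mirror the category tests above.)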
+ assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent == 'parent_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term") as post, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_term") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.UpdateGlossaryTermRequest.pb(business_glossary.UpdateGlossaryTermRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm()) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryTermRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryTerm() + post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata + + client.update_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_term_rest_bad_request(request_type=business_glossary.DeleteGlossaryTermRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary_term(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.DeleteGlossaryTermRequest, + dict, +]) +def test_delete_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary_term(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_term") as pre: + pre.assert_not_called() + pb_message = business_glossary.DeleteGlossaryTermRequest.pb(business_glossary.DeleteGlossaryTermRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = business_glossary.DeleteGlossaryTermRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_glossary_term_rest_bad_request(request_type=business_glossary.GetGlossaryTermRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary_term(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.GetGlossaryTermRequest, + dict, +]) +def test_get_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm( + name='name_value', + uid='uid_value', + display_name='display_name_value', + description='description_value', + parent='parent_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary_term(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryTerm)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.parent == 'parent_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_glossary_term_rest_interceptors(null_interceptor):
+ transport = transports.BusinessGlossaryServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(),
+ )
+ client = BusinessGlossaryServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term") as post, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_term") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = business_glossary.GetGlossaryTermRequest.pb(business_glossary.GetGlossaryTermRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm())
+ req.return_value.content = return_value
+
+ request = business_glossary.GetGlossaryTermRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = business_glossary.GlossaryTerm()
+ post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata
+
+ client.get_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_list_glossary_terms_rest_bad_request(request_type=business_glossary.ListGlossaryTermsRequest):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossary_terms(request) + + +@pytest.mark.parametrize("request_type", [ + business_glossary.ListGlossaryTermsRequest, + dict, +]) +def test_list_glossary_terms_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossary_terms(request) + + # Establish that the response is the type that we expect. 
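+ # Note that the raw ListGlossaryTermsResponse is wrapped in a pager; in
+ # normal use (against a real backend) iterating the pager walks
+ # next_page_token transparently, e.g.:
+ #
+ #     for term in client.list_glossary_terms(request):
+ #         print(term.display_name)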
+ assert isinstance(response, pagers.ListGlossaryTermsPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_glossary_terms_rest_interceptors(null_interceptor):
+ transport = transports.BusinessGlossaryServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(),
+ )
+ client = BusinessGlossaryServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms") as post, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_terms") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = business_glossary.ListGlossaryTermsRequest.pb(business_glossary.ListGlossaryTermsRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = business_glossary.ListGlossaryTermsResponse.to_json(business_glossary.ListGlossaryTermsResponse())
+ req.return_value.content = return_value
+
+ request = business_glossary.ListGlossaryTermsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = business_glossary.ListGlossaryTermsResponse()
+ post_with_metadata.return_value = business_glossary.ListGlossaryTermsResponse(), metadata
+
+ client.list_glossary_terms(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
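+ # DeleteOperation maps to google.protobuf.Empty, which the client surfaces
+ # as None, so a '{}' JSON body is all the mocked response needs; roughly:
+ #
+ #     from google.protobuf import empty_pb2
+ #     json_format.Parse('{}', empty_pb2.Empty())  # parses cleanly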
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary), + '__call__') as call: + client.create_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary), + '__call__') as call: + client.update_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
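+ # With request=None the generated client coerces the argument into a
+ # default request message before invoking the transport, roughly:
+ #
+ #     if not isinstance(request, business_glossary.DeleteGlossaryRequest):
+ #         request = business_glossary.DeleteGlossaryRequest(request)
+ #
+ # which is why the stub below should see an empty DeleteGlossaryRequest.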
+ with mock.patch.object( + type(client.transport.delete_glossary), + '__call__') as call: + client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary), + '__call__') as call: + client.get_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossaries_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), + '__call__') as call: + client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), + '__call__') as call: + client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), + '__call__') as call: + client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_glossary_category), + '__call__') as call: + client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), + '__call__') as call: + client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossary_categories_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + '__call__') as call: + client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), + '__call__') as call: + client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), + '__call__') as call: + client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_glossary_term_empty_call_rest():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_glossary_term),
+ '__call__') as call:
+ client.delete_glossary_term(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = business_glossary.DeleteGlossaryTermRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_glossary_term_empty_call_rest():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_glossary_term),
+ '__call__') as call:
+ client.get_glossary_term(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = business_glossary.GetGlossaryTermRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_glossary_terms_empty_call_rest():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_glossary_terms),
+ '__call__') as call:
+ client.list_glossary_terms(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = business_glossary.ListGlossaryTermsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_business_glossary_service_rest_lro_client():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert isinstance(
+ client.transport,
+ transports.BusinessGlossaryServiceGrpcTransport,
+ )
+
+def test_business_glossary_service_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transport = transports.BusinessGlossaryServiceTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json"
+ )
+
+
+def test_business_glossary_service_base_transport():
+ # Instantiate the base transport.
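+ # Each RPC on the abstract base transport is a stub that raises; a sketch
+ # of the generated pattern (not the verbatim implementation):
+ #
+ #     def create_glossary(self):
+ #         raise NotImplementedError()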
+ with mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport.__init__') as Transport:
+ Transport.return_value = None
+ transport = transports.BusinessGlossaryServiceTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ 'create_glossary',
+ 'update_glossary',
+ 'delete_glossary',
+ 'get_glossary',
+ 'list_glossaries',
+ 'create_glossary_category',
+ 'update_glossary_category',
+ 'delete_glossary_category',
+ 'get_glossary_category',
+ 'list_glossary_categories',
+ 'create_glossary_term',
+ 'update_glossary_term',
+ 'delete_glossary_term',
+ 'get_glossary_term',
+ 'list_glossary_terms',
+ 'get_location',
+ 'list_locations',
+ 'get_operation',
+ 'cancel_operation',
+ 'delete_operation',
+ 'list_operations',
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
+ # Additionally, the LRO client (a property) should
+ # also raise NotImplementedError
+ with pytest.raises(NotImplementedError):
+ transport.operations_client
+
+ # Catch all for all remaining methods and properties
+ remainder = [
+ 'kind',
+ ]
+ for r in remainder:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, r)()
+
+
+def test_business_glossary_service_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages') as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.BusinessGlossaryServiceTransport(
+ credentials_file="credentials.json",
+ quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with("credentials.json",
+ scopes=None,
+ default_scopes=(
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_business_glossary_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages') as Transport:
+ Transport.return_value = None
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport = transports.BusinessGlossaryServiceTransport()
+ adc.assert_called_once()
+
+
+def test_business_glossary_service_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
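+ # Application Default Credentials resolve through google.auth.default(),
+ # which returns a (credentials, project) tuple, e.g.:
+ #
+ #     credentials, project = google.auth.default(
+ #         scopes=['https://www.googleapis.com/auth/cloud-platform'])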
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ BusinessGlossaryServiceClient()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ),
+ quota_project_id=None,
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BusinessGlossaryServiceGrpcTransport,
+ transports.BusinessGlossaryServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_business_glossary_service_transport_auth_adc(transport_class):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class(quota_project_id="octopus", scopes=["1", "2"])
+ adc.assert_called_once_with(
+ scopes=["1", "2"],
+ default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+ quota_project_id="octopus",
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BusinessGlossaryServiceGrpcTransport,
+ transports.BusinessGlossaryServiceGrpcAsyncIOTransport,
+ transports.BusinessGlossaryServiceRestTransport,
+ ],
+)
+def test_business_glossary_service_transport_auth_gdch_credentials(transport_class):
+ host = 'https://language.com'
+ api_audience_tests = [None, 'https://language2.com']
+ api_audience_expect = [host, 'https://language2.com']
+ for t, e in zip(api_audience_tests, api_audience_expect):
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+ gdch_mock = mock.MagicMock()
+ type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+ adc.return_value = (gdch_mock, None)
+ transport_class(host=host, api_audience=t)
+ gdch_mock.with_gdch_audience.assert_called_once_with(
+ e
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class,grpc_helpers",
+ [
+ (transports.BusinessGlossaryServiceGrpcTransport, grpc_helpers),
+ (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, grpc_helpers_async)
+ ],
+)
+def test_business_glossary_service_transport_create_channel(transport_class, grpc_helpers):
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel:
+ creds = ga_credentials.AnonymousCredentials()
+ adc.return_value = (creds, None)
+ transport_class(
+ quota_project_id="octopus",
+ scopes=["1", "2"]
+ )
+
+ create_channel.assert_called_with(
+ "dataplex.googleapis.com:443",
+ credentials=creds,
+ credentials_file=None,
+ quota_project_id="octopus",
+ default_scopes=(
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ),
+ scopes=["1", "2"],
+ default_host="dataplex.googleapis.com",
+ ssl_credentials=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+
+@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport])
+def test_business_glossary_service_grpc_transport_client_cert_source_for_mtls(
+ transport_class
+):
+ cred = ga_credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
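+ # A caller-side sketch of the behavior under test: explicitly built channel
+ # credentials should take precedence over a client_cert_source_for_mtls
+ # callback (cert_bytes/key_bytes below are placeholders):
+ #
+ #     ssl_creds = grpc.ssl_channel_credentials(
+ #         certificate_chain=cert_bytes, private_key=key_bytes)
+ #     transport_class(credentials=cred, ssl_channel_credentials=ssl_creds)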
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+def test_business_glossary_service_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+ transports.BusinessGlossaryServiceRestTransport(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_business_glossary_service_host_no_port(transport_name):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:443'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://dataplex.googleapis.com'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_business_glossary_service_host_with_port(transport_name):
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:8000'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://dataplex.googleapis.com:8000'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "rest",
+])
+def test_business_glossary_service_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = BusinessGlossaryServiceClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = BusinessGlossaryServiceClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.create_glossary._session
+ session2 = client2.transport.create_glossary._session
+ assert session1 != session2
+ session1 = client1.transport.update_glossary._session
+ session2 = client2.transport.update_glossary._session
+ assert session1 != session2
+ session1 = client1.transport.delete_glossary._session
+ session2 = client2.transport.delete_glossary._session
+ assert session1 != session2
+ session1 = client1.transport.get_glossary._session
+ session2 = client2.transport.get_glossary._session
+ assert session1 != session2
+ session1 = client1.transport.list_glossaries._session
+ session2 = client2.transport.list_glossaries._session
+ assert session1 != session2
+ session1 = client1.transport.create_glossary_category._session
+ session2 = client2.transport.create_glossary_category._session
+ assert session1 != session2
+ session1 = client1.transport.update_glossary_category._session
+ session2 = client2.transport.update_glossary_category._session
+ assert session1 != session2
+ session1 = client1.transport.delete_glossary_category._session
+ session2 = client2.transport.delete_glossary_category._session
+ assert session1 != session2
+ session1 = client1.transport.get_glossary_category._session
+ session2 = client2.transport.get_glossary_category._session
+ assert session1 != session2
+ session1 = client1.transport.list_glossary_categories._session
+ session2 = client2.transport.list_glossary_categories._session
+ assert session1 != session2
+ session1 = client1.transport.create_glossary_term._session
+ session2 = client2.transport.create_glossary_term._session
+ assert session1 != session2
+ session1 = client1.transport.update_glossary_term._session
+ session2 = client2.transport.update_glossary_term._session
+ assert session1 != session2
+ session1 = client1.transport.delete_glossary_term._session
+ session2 = client2.transport.delete_glossary_term._session
+ assert session1 != session2
+ session1 = client1.transport.get_glossary_term._session
+ session2 = client2.transport.get_glossary_term._session
+ assert session1 != session2
+ session1 = client1.transport.list_glossary_terms._session
+ session2 = client2.transport.list_glossary_terms._session
+ assert session1 != session2
+
+
+def test_business_glossary_service_grpc_transport_channel():
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.BusinessGlossaryServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_business_glossary_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.BusinessGlossaryServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport])
+def test_business_glossary_service_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport])
+def test_business_glossary_service_transport_channel_mtls_with_adc(
+ transport_class
+):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_business_glossary_service_grpc_lro_client():
+ client = BusinessGlossaryServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_business_glossary_service_grpc_lro_async_client():
+ client = BusinessGlossaryServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc_asyncio',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsAsyncClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_glossary_path():
+ project = "squid"
+ location = "clam"
+ glossary = "whelk"
+ expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, )
+ actual = BusinessGlossaryServiceClient.glossary_path(project, location, glossary)
+ assert expected == actual
+
+
+def test_parse_glossary_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "glossary": "nudibranch",
+ }
+ path = BusinessGlossaryServiceClient.glossary_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BusinessGlossaryServiceClient.parse_glossary_path(path)
+ assert expected == actual
+
+def test_glossary_category_path():
+ project = "cuttlefish"
+ location = "mussel"
+ glossary = "winkle"
+ glossary_category = "nautilus"
+ expected = "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format(project=project, location=location, glossary=glossary, glossary_category=glossary_category, )
+ actual = BusinessGlossaryServiceClient.glossary_category_path(project, location, glossary, glossary_category)
+ assert expected == actual
+
+
+def test_parse_glossary_category_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ "glossary": "squid",
+ "glossary_category": "clam",
+ }
+ path = BusinessGlossaryServiceClient.glossary_category_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BusinessGlossaryServiceClient.parse_glossary_category_path(path)
+ assert expected == actual
+
+def test_glossary_term_path():
+ project = "whelk"
+ location = "octopus"
+ glossary = "oyster"
+ glossary_term = "nudibranch"
+ expected = "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format(project=project, location=location, glossary=glossary, glossary_term=glossary_term, )
+ actual = BusinessGlossaryServiceClient.glossary_term_path(project, location, glossary, glossary_term)
+ assert expected == actual
+
+
+def test_parse_glossary_term_path():
+ expected = {
+ "project": "cuttlefish",
+ "location": "mussel",
+ "glossary": "winkle",
+ "glossary_term": "nautilus",
+ }
+ path = BusinessGlossaryServiceClient.glossary_term_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BusinessGlossaryServiceClient.parse_glossary_term_path(path)
+ assert expected == actual
+
+def test_common_billing_account_path():
+ billing_account = "scallop"
+ expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+ actual = BusinessGlossaryServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "abalone",
+ }
+ path = BusinessGlossaryServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
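+ # The parse_* helpers invert their *_path builders with a named-group
+ # regex, roughly following the generated pattern:
+ #
+ #     m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ #     return m.groupdict() if m else {}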
+ actual = BusinessGlossaryServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = BusinessGlossaryServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = BusinessGlossaryServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BusinessGlossaryServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = BusinessGlossaryServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = BusinessGlossaryServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = BusinessGlossaryServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BusinessGlossaryServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = BusinessGlossaryServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.BusinessGlossaryServiceTransport, '_prep_wrapped_messages') as prep: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.BusinessGlossaryServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = BusinessGlossaryServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
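+        # FakeUnaryUnaryCall wraps the message in an awaitable, mimicking a real
+        # async gRPC invocation.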
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
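+    # The "x-goog-request-params" routing header asserted below must mirror the
+    # request's name field.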
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = BusinessGlossaryServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
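+        # Exiting the "with client" block below must close the transport exactly once.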
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport), + (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py new file mode 100644 index 000000000000..1fc6dcca2c3b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -0,0 +1,24562 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceAsyncClient +from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient +from google.cloud.dataplex_v1.services.catalog_service import pagers +from google.cloud.dataplex_v1.services.catalog_service import transports +from google.cloud.dataplex_v1.types import catalog +from google.cloud.dataplex_v1.types import service +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
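+# (A "localhost" default would make the derived mTLS endpoint indistinguishable,
+# so it is swapped for "foo.googleapis.com" before the endpoint logic is exercised.)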
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert CatalogServiceClient._get_default_mtls_endpoint(None) is None
+    assert CatalogServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert CatalogServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert CatalogServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    assert CatalogServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert CatalogServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert CatalogServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            CatalogServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert CatalogServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert CatalogServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert CatalogServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            CatalogServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert CatalogServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert CatalogServiceClient._get_client_cert_source(None, False) is None
+    assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            assert CatalogServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient))
+@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient))
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = CatalogServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    assert CatalogServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert CatalogServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert CatalogServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert CatalogServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert CatalogServiceClient._get_universe_domain(None, None) == CatalogServiceClient._DEFAULT_UNIVERSE
+
+    with pytest.raises(ValueError) as excinfo:
+        CatalogServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
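+
+# The parametrized cases below assume credential info is attached to auth-related
+# errors only (401/403/404); a 500 must leave error.details untouched.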
+
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
+    (401, CRED_INFO_JSON, True),
+    (403, CRED_INFO_JSON, True),
+    (404, CRED_INFO_JSON, True),
+    (500, CRED_INFO_JSON, False),
+    (401, None, False),
+    (403, None, False),
+    (404, None, False),
+    (500, None, False)
+])
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = CatalogServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+@pytest.mark.parametrize("error_code", [401,403,404,500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = CatalogServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (CatalogServiceClient, "grpc"),
+    (CatalogServiceAsyncClient, "grpc_asyncio"),
+    (CatalogServiceClient, "rest"),
+])
+def test_catalog_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else
+        'https://dataplex.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.CatalogServiceGrpcTransport, "grpc"),
+    (transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.CatalogServiceRestTransport, "rest"),
+])
+def test_catalog_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (CatalogServiceClient, "grpc"),
+    (CatalogServiceAsyncClient, "grpc_asyncio"),
+    (CatalogServiceClient, "rest"),
+])
+def test_catalog_service_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else
+        'https://dataplex.googleapis.com'
+    )
+
+
+def test_catalog_service_client_get_transport_class():
+    transport = CatalogServiceClient.get_transport_class()
+    available_transports = [
+        transports.CatalogServiceGrpcTransport,
+        transports.CatalogServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = CatalogServiceClient.get_transport_class("grpc")
+    assert transport == transports.CatalogServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"),
+    (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest"),
+])
+@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient))
+@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient))
+def test_catalog_service_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
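+    # "always" must force the mTLS endpoint even though no client cert is configured.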
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_endpoint is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "true"),
+    (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "false"),
+    (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", "true"),
+    (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient))
+@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_catalog_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
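+    # With no cert source available, the plain endpoint is expected regardless of
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE.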
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CatalogServiceClient, CatalogServiceAsyncClient +]) +@mock.patch.object(CatalogServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceAsyncClient)) +def test_catalog_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CatalogServiceClient, CatalogServiceAsyncClient +]) +@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) +@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) +def test_catalog_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CatalogServiceClient._DEFAULT_UNIVERSE + default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest"), +]) +def test_catalog_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", None), +]) +def test_catalog_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_catalog_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CatalogServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_catalog_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateEntryTypeRequest,
+    dict,
+])
+def test_create_entry_type(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_entry_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.CreateEntryTypeRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_create_entry_type_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = catalog.CreateEntryTypeRequest(
+        parent='parent_value',
+        entry_type_id='entry_type_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_type),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.create_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryTypeRequest( + parent='parent_value', + entry_type_id='entry_type_id_value', + ) + +def test_create_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc + request = {} + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry_type] = mock_rpc + + request = {} + await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_entry_type_async_from_dict(): + await test_create_entry_type_async(request_type=dict) + +def test_create_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry_type( + parent='parent_value', + entry_type=catalog.EntryType(name='name_value'), + entry_type_id='entry_type_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
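+        # The flattened keyword arguments should have been coalesced into a
+        # single CreateEntryTypeRequest; each field is verified individually.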
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_type_id
+        mock_val = 'entry_type_id_value'
+        assert arg == mock_val
+
+
+def test_create_entry_type_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entry_type(
+            catalog.CreateEntryTypeRequest(),
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_entry_type_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entry_type(
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_type_id
+        mock_val = 'entry_type_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_type_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_entry_type(
+            catalog.CreateEntryTypeRequest(),
+            parent='parent_value',
+            entry_type=catalog.EntryType(name='name_value'),
+            entry_type_id='entry_type_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.UpdateEntryTypeRequest,
+    dict,
+])
+def test_update_entry_type(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_entry_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_entry_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryTypeRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryTypeRequest( + ) + +def test_update_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc + request = {} + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_entry_type] = mock_rpc + + request = {} + await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_entry_type_async_from_dict(): + await test_update_entry_type_async(request_type=dict) + +def test_update_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryTypeRequest() + + request.entry_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
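+    # Update RPCs route on the resource name nested inside the request body,
+    # so the expected routing header key is "entry_type.name" rather than "name".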
+ with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_type.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryTypeRequest() + + request.entry_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_type.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_type( + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_type + mock_val = catalog.EntryType(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_type( + catalog.UpdateEntryTypeRequest(), + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_entry_type(
+            entry_type=catalog.EntryType(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].entry_type
+        mock_val = catalog.EntryType(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_entry_type_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_entry_type(
+            catalog.UpdateEntryTypeRequest(),
+            entry_type=catalog.EntryType(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.DeleteEntryTypeRequest,
+    dict,
+])
+def test_delete_entry_type(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_entry_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.DeleteEntryTypeRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_delete_entry_type_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = catalog.DeleteEntryTypeRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_type),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryTypeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc + request = {} + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entry_type] = mock_rpc + + request = {} + await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_entry_type_async_from_dict(): + await test_delete_entry_type_async(request_type=dict) + +def test_delete_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_entry_type_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_entry_type(
+            catalog.DeleteEntryTypeRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_entry_type_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_entry_type(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_type_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_entry_type(
+            catalog.DeleteEntryTypeRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.ListEntryTypesRequest,
+    dict,
+])
+def test_list_entry_types(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.ListEntryTypesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_entry_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.ListEntryTypesRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListEntryTypesPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_entry_types_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListEntryTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entry_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListEntryTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_entry_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc + request = {} + client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entry_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entry_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entry_types] = mock_rpc + + request = {} + await client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_entry_types(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryTypesRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_entry_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.ListEntryTypesRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListEntryTypesAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_types_async_from_dict():
+    await test_list_entry_types_async(request_type=dict)
+
+def test_list_entry_types_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.ListEntryTypesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        call.return_value = catalog.ListEntryTypesResponse()
+        client.list_entry_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_types_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.ListEntryTypesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse())
+        await client.list_entry_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+def test_list_entry_types_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.ListEntryTypesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_entry_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_entry_types_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_entry_types(
+            catalog.ListEntryTypesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_entry_types_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_entry_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entry_types_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_entry_types(
+            catalog.ListEntryTypesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_entry_types_pager(transport_name: str = "grpc"):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entry_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
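+        # Each element of side_effect is consumed by one page fetch; the
+        # trailing RuntimeError guards against the pager requesting a page
+        # beyond the final response (which carries no next_page_token).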
+ call.side_effect = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entry_types(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryType) + for i in results) +def test_list_entry_types_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entry_types(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entry_types_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entry_types(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.EntryType) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entry_types_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_entry_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryTypeRequest, + dict, +]) +def test_get_entry_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + type_aliases=['type_aliases_value'], + platform='platform_value', + system='system_value', + ) + response = client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryType) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.type_aliases == ['type_aliases_value'] + assert response.platform == 'platform_value' + assert response.system == 'system_value' + + +def test_get_entry_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entry_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryTypeRequest( + name='name_value', + ) + +def test_get_entry_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + request = {} + client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_entry_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_entry_type] = mock_rpc + + request = {} + await client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
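+ # FakeUnaryUnaryCall wraps the response in an awaitable, standing in for + # a real async unary-unary gRPC call.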
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + type_aliases=['type_aliases_value'], + platform='platform_value', + system='system_value', + )) + response = await client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryType) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.type_aliases == ['type_aliases_value'] + assert response.platform == 'platform_value' + assert response.system == 'system_value' + + +@pytest.mark.asyncio +async def test_get_entry_type_async_from_dict(): + await test_get_entry_type_async(request_type=dict) + +def test_get_entry_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value = catalog.EntryType() + client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entry_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetEntryTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType()) + await client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entry_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method.
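+ # (The keyword arguments below are folded into a GetEntryTypeRequest + # before the transport is invoked.)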
+ client.get_entry_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_entry_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_type( + catalog.GetEntryTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_entry_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_type_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry_type( + catalog.GetEntryTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.CreateAspectTypeRequest, + dict, +]) +def test_create_aspect_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_aspect_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests.
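+ # (AIP-4235 covers request fields, such as request IDs, that client + # libraries fill with a UUID4 when the caller leaves them unset.)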
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateAspectTypeRequest( + parent='parent_value', + aspect_type_id='aspect_type_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateAspectTypeRequest( + parent='parent_value', + aspect_type_id='aspect_type_id_value', + ) + +def test_create_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc + request = {} + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_aspect_type] = mock_rpc + + request = {} + await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_aspect_type_async_from_dict(): + await test_create_aspect_type_async(request_type=dict) + +def test_create_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateAspectTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateAspectTypeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
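+ # (Routing information travels in the x-goog-request-params metadata + # header, extracted below.)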
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_aspect_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_aspect_type( + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].aspect_type + mock_val = catalog.AspectType(name='name_value') + assert arg == mock_val + arg = args[0].aspect_type_id + mock_val = 'aspect_type_id_value' + assert arg == mock_val + + +def test_create_aspect_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_aspect_type( + catalog.CreateAspectTypeRequest(), + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + +@pytest.mark.asyncio +async def test_create_aspect_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_aspect_type( + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].aspect_type + mock_val = catalog.AspectType(name='name_value') + assert arg == mock_val + arg = args[0].aspect_type_id + mock_val = 'aspect_type_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_aspect_type_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
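+ # (The ValueError is raised client-side, before any RPC is attempted.)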
+ with pytest.raises(ValueError): + await client.create_aspect_type( + catalog.CreateAspectTypeRequest(), + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateAspectTypeRequest, + dict, +]) +def test_update_aspect_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_aspect_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateAspectTypeRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateAspectTypeRequest( + ) + +def test_update_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc + request = {} + client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_aspect_type] = mock_rpc + + request = {} + await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_aspect_type_async_from_dict(): + await test_update_aspect_type_async(request_type=dict) + +def test_update_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateAspectTypeRequest() + + request.aspect_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
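+ # For a nested resource the routing key is the full field path, + # aspect_type.name, as the metadata assertion below shows.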
+ with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'aspect_type.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateAspectTypeRequest() + + request.aspect_type.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'aspect_type.name=name_value', + ) in kw['metadata'] + + +def test_update_aspect_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_aspect_type( + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].aspect_type + mock_val = catalog.AspectType(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_aspect_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_aspect_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_aspect_type( + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].aspect_type + mock_val = catalog.AspectType(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_aspect_type_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteAspectTypeRequest, + dict, +]) +def test_delete_aspect_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_aspect_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.DeleteAspectTypeRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteAspectTypeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc + request = {} + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_aspect_type] = mock_rpc + + request = {} + await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_aspect_type_async_from_dict(): + await test_delete_aspect_type_async(request_type=dict) + +def test_delete_aspect_type_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteAspectTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_aspect_type_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteAspectTypeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_aspect_type_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_aspect_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
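+ # (Each flattened kwarg should appear verbatim on the captured request.)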
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_aspect_type_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_aspect_type( + catalog.DeleteAspectTypeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_aspect_type_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_aspect_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_aspect_type_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_aspect_type( + catalog.DeleteAspectTypeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListAspectTypesRequest, + dict, +]) +def test_list_aspect_types(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListAspectTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.ListAspectTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAspectTypesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_aspect_types_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListAspectTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_aspect_types(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListAspectTypesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_aspect_types_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_aspect_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc + request = {} + client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_aspect_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_aspect_types in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_aspect_types] = mock_rpc + + request = {} + await client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_aspect_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListAspectTypesRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + response = await client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.ListAspectTypesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAspectTypesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.asyncio +async def test_list_aspect_types_async_from_dict(): + await test_list_aspect_types_async(request_type=dict) + +def test_list_aspect_types_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListAspectTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value = catalog.ListAspectTypesResponse() + client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_aspect_types_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListAspectTypesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse()) + await client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_aspect_types_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListAspectTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_aspect_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_aspect_types_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_aspect_types( + catalog.ListAspectTypesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_aspect_types_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_aspect_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_aspect_types_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_aspect_types( + catalog.ListAspectTypesRequest(), + parent='parent_value', + ) + + +def test_list_aspect_types_pager(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Set the response to a series of pages.
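+ # (Same four-page script as the entry-type pager tests above: 3 + 0 + 1 + 2 + # items across pages 'abc', 'def', 'ghi', and the final unnamed page.)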
+ call.side_effect = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_aspect_types(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.AspectType) + for i in results) +def test_list_aspect_types_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_aspect_types(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_aspect_types_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_aspect_types(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.AspectType) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_aspect_types_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_aspect_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetAspectTypeRequest, + dict, +]) +def test_get_aspect_type(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.AspectType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + response = client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetAspectTypeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.AspectType) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + + +def test_get_aspect_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetAspectTypeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
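+ # The mocked result needs a string `name` because shared wrapper code
+ # may probe response.name (as operation-style compute clients do).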
+ client.get_aspect_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetAspectTypeRequest( + name='name_value', + ) + +def test_get_aspect_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + request = {} + client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_aspect_type in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_aspect_type] = mock_rpc + + request = {} + await client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetAspectTypeRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
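+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
+ # awaitable call object, mimicking what a real async gRPC stub returns.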
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ etag='etag_value',
+ transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+ ))
+ response = await client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetAspectTypeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.AspectType)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.etag == 'etag_value'
+ assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_async_from_dict():
+ await test_get_aspect_type_async(request_type=dict)
+
+def test_get_aspect_type_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetAspectTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ call.return_value = catalog.AspectType()
+ client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetAspectTypeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
+ await client.get_aspect_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_aspect_type_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.AspectType()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
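+ # Truthy values matter: proto3 scalars left at their default are
+ # indistinguishable from unset, so a falsy value could pass vacuously.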
+ client.get_aspect_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_aspect_type_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_aspect_type(
+ catalog.GetAspectTypeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_aspect_type),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_aspect_type(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_aspect_type_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_aspect_type(
+ catalog.GetAspectTypeRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.CreateEntryGroupRequest,
+ dict,
+])
+def test_create_entry_group(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.CreateEntryGroupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_entry_group_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
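+ # (AIP-4235 auto-population applies to UUID4 request-id style fields;
+ # fields set explicitly here must round-trip unchanged.)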
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryGroupRequest( + parent='parent_value', + entry_group_id='entry_group_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryGroupRequest( + parent='parent_value', + entry_group_id='entry_group_id_value', + ) + +def test_create_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc + request = {} + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry_group] = mock_rpc + + request = {} + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + +def test_create_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
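+ # The routing parameters travel in 'x-goog-request-params' metadata,
+ # derived from the request's resource path (see AIP-4222).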
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_entry_group_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_entry_group(
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_group_id
+ mock_val = 'entry_group_id_value'
+ assert arg == mock_val
+
+
+def test_create_entry_group_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_entry_group(
+ catalog.CreateEntryGroupRequest(),
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_entry_group_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_entry_group(
+ parent='parent_value',
+ entry_group=catalog.EntryGroup(name='name_value'),
+ entry_group_id='entry_group_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].entry_group_id
+ mock_val = 'entry_group_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
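+ # The ValueError is raised client-side, before any RPC is attempted.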
+ with pytest.raises(ValueError): + await client.create_entry_group( + catalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group=catalog.EntryGroup(name='name_value'), + entry_group_id='entry_group_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryGroupRequest, + dict, +]) +def test_update_entry_group(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_entry_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryGroupRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryGroupRequest( + ) + +def test_update_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc + request = {} + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_entry_group] = mock_rpc + + request = {} + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + +def test_update_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
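+ # For Update RPCs the routing key is the nested resource name, so the
+ # expected header below is 'entry_group.name=...' rather than a
+ # top-level field.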
+ with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_group_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_group( + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = catalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_group_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_group( + catalog.UpdateEntryGroupRequest(), + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
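+ # LRO methods hand back an Operation proto; wrapping it in an awaitable
+ # fake call lets the async surface resolve it into an operation future.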
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_entry_group(
+ entry_group=catalog.EntryGroup(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].entry_group
+ mock_val = catalog.EntryGroup(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_entry_group(
+ catalog.UpdateEntryGroupRequest(),
+ entry_group=catalog.EntryGroup(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.DeleteEntryGroupRequest,
+ dict,
+])
+def test_delete_entry_group(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.delete_entry_group(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.DeleteEntryGroupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_entry_group_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = catalog.DeleteEntryGroupRequest(
+ name='name_value',
+ etag='etag_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryGroupRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc + request = {} + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entry_group] = mock_rpc + + request = {} + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + +def test_delete_entry_group_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_group_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_group_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
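+ # Flattened kwargs are copied onto a request message, so the assertions
+ # inspect args[0], the request actually handed to the transport.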
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_entry_group_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_entry_group(
+ catalog.DeleteEntryGroupRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_entry_group_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_entry_group),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_entry_group(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_group_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_entry_group(
+ catalog.DeleteEntryGroupRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ catalog.ListEntryGroupsRequest,
+ dict,
+])
+def test_list_entry_groups(request_type, transport: str = 'grpc'):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryGroupsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ )
+ response = client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryGroupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryGroupsPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_entry_groups_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListEntryGroupsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entry_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListEntryGroupsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_entry_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_groups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc + request = {} + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entry_groups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entry_groups in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entry_groups] = mock_rpc + + request = {} + await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. 
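+ # (_prep_wrapped_messages cached the wrapped RPC at construction;
+ # replacing that cache entry lets us count calls without re-wrapping.)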
+ assert mock_rpc.call_count == 1
+
+ await client.list_entry_groups(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryGroupsRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.ListEntryGroupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_from_dict():
+ await test_list_entry_groups_async(request_type=dict)
+
+def test_list_entry_groups_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryGroupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ call.return_value = catalog.ListEntryGroupsResponse()
+ client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.ListEntryGroupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
+ await client.list_entry_groups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_entry_groups_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.ListEntryGroupsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_entry_groups(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_entry_groups_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_entry_groups(
+ catalog.ListEntryGroupsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entry_groups(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entry_groups_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entry_groups(
+ catalog.ListEntryGroupsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entry_groups_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entry_groups),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entry_groups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryGroup) + for i in results) +def test_list_entry_groups_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entry_groups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entry_groups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.EntryGroup) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
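+    # Editor's note: the async variants patch the stub with
+    # `new_callable=mock.AsyncMock` so that calling it returns an awaitable,
+    # the way a real grpc.aio unary-unary multicallable does; the sync tests
+    # can get by with the default MagicMock.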
+ with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryGroupRequest, + dict, +]) +def test_get_entry_group(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryGroup( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + response = client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryGroup) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + + +def test_get_entry_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryGroupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
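+        # Editor's note: `return_value.name` is set to a real string only so
+        # that shared client plumbing that reads `.name` off the response
+        # does not choke on a Mock. The substance of this test is the
+        # assertion below: the request the stub receives must contain exactly
+        # the fields set above, i.e. the client must not auto-populate
+        # ordinary string fields (AIP-4235 reserves that for annotated UUID4
+        # request-ID fields).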
+ client.get_entry_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryGroupRequest( + name='name_value', + ) + +def test_get_entry_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + request = {} + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_entry_group in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_entry_group] = mock_rpc + + request = {} + await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryGroupRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
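+        # Editor's note: `grpc_helpers_async.FakeUnaryUnaryCall` wraps a
+        # response message in an awaitable call object, standing in for what
+        # a real grpc.aio channel would return. Roughly:
+        #
+        #     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+        #         catalog.EntryGroup(name='name_value'))
+        #     response = await client.get_entry_group(request)  # awaits the fake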
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+        ))
+        response = await client.get_entry_group(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.GetEntryGroupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryGroup)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.asyncio
+async def test_get_entry_group_async_from_dict():
+    await test_get_entry_group_async(request_type=dict)
+
+def test_get_entry_group_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.GetEntryGroupRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entry_group),
+            '__call__') as call:
+        call.return_value = catalog.EntryGroup()
+        client.get_entry_group(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_group_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.GetEntryGroupRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entry_group),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
+        await client.get_entry_group(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_entry_group_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entry_group),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.EntryGroup()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_entry_group(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_entry_group_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_entry_group(
+            catalog.GetEntryGroupRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_entry_group_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entry_group),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_entry_group(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entry_group_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_entry_group(
+            catalog.GetEntryGroupRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateEntryRequest,
+    dict,
+])
+def test_create_entry(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.Entry(
+            name='name_value',
+            entry_type='entry_type_value',
+            parent_entry='parent_entry_value',
+            fully_qualified_name='fully_qualified_name_value',
+        )
+        response = client.create_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.CreateEntryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_create_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryRequest( + parent='parent_value', + entry_id='entry_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryRequest( + parent='parent_value', + entry_id='entry_id_value', + ) + +def test_create_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + request = {} + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry] = mock_rpc + + request = {} + await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. 
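+        # Editor's note: `_prep_wrapped_messages` runs once at client
+        # construction and caches each RPC, wrapped with its default retry,
+        # timeout, and metadata, in `_transport._wrapped_methods`. The two
+        # assertions below pin that behavior down: the RPC is invoked twice,
+        # yet `wrap_method` is never called again after construction.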
+        assert mock_rpc.call_count == 1
+
+        await client.create_entry(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+            name='name_value',
+            entry_type='entry_type_value',
+            parent_entry='parent_entry_value',
+            fully_qualified_name='fully_qualified_name_value',
+        ))
+        response = await client.create_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.CreateEntryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.Entry)
+    assert response.name == 'name_value'
+    assert response.entry_type == 'entry_type_value'
+    assert response.parent_entry == 'parent_entry_value'
+    assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_create_entry_async_from_dict():
+    await test_create_entry_async(request_type=dict)
+
+def test_create_entry_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.CreateEntryRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        call.return_value = catalog.Entry()
+        client.create_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_entry_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.CreateEntryRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+        await client.create_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_entry_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.Entry()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_entry(
+            parent='parent_value',
+            entry=catalog.Entry(name='name_value'),
+            entry_id='entry_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry
+        mock_val = catalog.Entry(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_id
+        mock_val = 'entry_id_value'
+        assert arg == mock_val
+
+
+def test_create_entry_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entry(
+            catalog.CreateEntryRequest(),
+            parent='parent_value',
+            entry=catalog.Entry(name='name_value'),
+            entry_id='entry_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_entry_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entry(
+            parent='parent_value',
+            entry=catalog.Entry(name='name_value'),
+            entry_id='entry_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry
+        mock_val = catalog.Entry(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_id
+        mock_val = 'entry_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
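+    # Editor's note: generated clients accept either a request proto or the
+    # flattened keyword fields, never both. A sketch of the call shapes
+    # (prepend `await` for the async client):
+    #
+    #     client.create_entry(request=catalog.CreateEntryRequest(...))    # ok
+    #     client.create_entry(parent=..., entry=..., entry_id=...)        # ok
+    #     client.create_entry(catalog.CreateEntryRequest(), parent=...)   # ValueError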
+ with pytest.raises(ValueError): + await client.create_entry( + catalog.CreateEntryRequest(), + parent='parent_value', + entry=catalog.Entry(name='name_value'), + entry_id='entry_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryRequest, + dict, +]) +def test_update_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.UpdateEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_update_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.UpdateEntryRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.UpdateEntryRequest( + ) + +def test_update_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + request = {} + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.update_entry(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_entry in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_entry] = mock_rpc
+
+        request = {}
+        await client.update_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_entry(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+            name='name_value',
+            entry_type='entry_type_value',
+            parent_entry='parent_entry_value',
+            fully_qualified_name='fully_qualified_name_value',
+        ))
+        response = await client.update_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.UpdateEntryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.Entry)
+    assert response.name == 'name_value'
+    assert response.entry_type == 'entry_type_value'
+    assert response.parent_entry == 'parent_entry_value'
+    assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_update_entry_async_from_dict():
+    await test_update_entry_async(request_type=dict)
+
+def test_update_entry_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.UpdateEntryRequest()
+
+    request.entry.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
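+    # Editor's note: for Update RPCs the resource name lives on the request's
+    # `entry` submessage, so the routing key is the nested path `entry.name`;
+    # the assertions below accordingly expect 'entry.name=name_value' in
+    # x-goog-request-params rather than a top-level field.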
+    with mock.patch.object(
+            type(client.transport.update_entry),
+            '__call__') as call:
+        call.return_value = catalog.Entry()
+        client.update_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'entry.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_entry_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.UpdateEntryRequest()
+
+    request.entry.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entry),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+        await client.update_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'entry.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_entry_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.Entry()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_entry(
+            entry=catalog.Entry(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].entry
+        mock_val = catalog.Entry(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_entry_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_entry(
+            catalog.UpdateEntryRequest(),
+            entry=catalog.Entry(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_entry_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
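+        # Editor's note: `update_mask` is a google.protobuf.FieldMask listing
+        # the fields to overwrite; 'paths_value' is only a placeholder. A
+        # realistic mask might look like, for example:
+        #
+        #     field_mask_pb2.FieldMask(paths=['aspects', 'fully_qualified_name'])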
+ response = await client.update_entry( + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = catalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry( + catalog.UpdateEntryRequest(), + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryRequest, + dict, +]) +def test_delete_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_delete_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.DeleteEntryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.delete_entry(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == catalog.DeleteEntryRequest(
+            name='name_value',
+        )
+
+def test_delete_entry_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_entry in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc
+        request = {}
+        client.delete_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_entry(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_entry in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_entry] = mock_rpc
+
+        request = {}
+        await client.delete_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_entry(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+            name='name_value',
+            entry_type='entry_type_value',
+            parent_entry='parent_entry_value',
+            fully_qualified_name='fully_qualified_name_value',
+        ))
+        response = await client.delete_entry(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + +def test_delete_entry_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) + await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_entry(
+            catalog.DeleteEntryRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_entry_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_entry(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entry_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_entry(
+            catalog.DeleteEntryRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.ListEntriesRequest,
+    dict,
+])
+def test_list_entries(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.ListEntriesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.ListEntriesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEntriesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_entries_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = catalog.ListEntriesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entries),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_entries(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == catalog.ListEntriesRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            filter='filter_value',
+        )
+
+def test_list_entries_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_entries in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc
+        request = {}
+        client.list_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_entries(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_entries in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_entries] = mock_rpc
+
+        request = {}
+        await client.list_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_entries(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntriesRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entries),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_entries(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.ListEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + +def test_list_entries_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entries_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse()) + await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entries_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entries_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_entries(
+ catalog.ListEntriesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_entries_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_entries(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entries_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_entries(
+ catalog.ListEntriesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_entries_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ catalog.Entry(),
+ catalog.Entry(),
+ ],
+ next_page_token='abc',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[],
+ next_page_token='def',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ ],
+ next_page_token='ghi',
+ ),
+ catalog.ListEntriesResponse(
+ entries=[
+ catalog.Entry(),
+ catalog.Entry(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_entries(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.Entry)
+ for i in results)
+def test_list_entries_pages(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
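+ # Four pages (3, 0, 1 and 2 entries) are served in order; the trailing
+ # RuntimeError makes the mock fail loudly if an extra page is requested.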
+ call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entries_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.Entry) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entries_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryRequest, + dict, +]) +def test_get_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_get_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryRequest( + name='name_value', + ) + +def test_get_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + request = {} + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_entry] = mock_rpc
+
+ request = {}
+ await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_entry_async_from_dict():
+ await test_get_entry_async(request_type=dict)
+
+def test_get_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ call.return_value = catalog.Entry()
+ client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ await client.get_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_entry_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.Entry()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_entry(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entry_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entry(
+ catalog.GetEntryRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entry_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entry(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entry_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_entry( + catalog.GetEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.LookupEntryRequest, + dict, +]) +def test_lookup_entry(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + response = client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.LookupEntryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +def test_lookup_entry_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.LookupEntryRequest( + name='name_value', + entry='entry_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.lookup_entry(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.LookupEntryRequest( + name='name_value', + entry='entry_value', + ) + +def test_lookup_entry_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + request = {} + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.lookup_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.lookup_entry in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.lookup_entry] = mock_rpc
+
+ request = {}
+ await client.lookup_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.lookup_entry(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.LookupEntryRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.lookup_entry),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
+ name='name_value',
+ entry_type='entry_type_value',
+ parent_entry='parent_entry_value',
+ fully_qualified_name='fully_qualified_name_value',
+ ))
+ response = await client.lookup_entry(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.LookupEntryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.Entry)
+ assert response.name == 'name_value'
+ assert response.entry_type == 'entry_type_value'
+ assert response.parent_entry == 'parent_entry_value'
+ assert response.fully_qualified_name == 'fully_qualified_name_value'
+
+
+@pytest.mark.asyncio
+async def test_lookup_entry_async_from_dict():
+ await test_lookup_entry_async(request_type=dict)
+
+def test_lookup_entry_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.LookupEntryRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_lookup_entry_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.LookupEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) + await client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + catalog.SearchEntriesRequest, + dict, +]) +def test_search_entries(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.SearchEntriesResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.SearchEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchEntriesPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_search_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.SearchEntriesRequest( + name='name_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + scope='scope_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.search_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.SearchEntriesRequest( + name='name_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + scope='scope_value', + ) + +def test_search_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + request = {} + client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_entries] = mock_rpc + + request = {} + await client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_search_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.SearchEntriesRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
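+ # The canned response is wrapped in a FakeUnaryUnaryCall so the async
+ # client can await it like a real grpc.aio call object.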
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse(
+ total_size=1086,
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.SearchEntriesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.SearchEntriesAsyncPager)
+ assert response.total_size == 1086
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_search_entries_async_from_dict():
+ await test_search_entries_async(request_type=dict)
+
+def test_search_entries_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.SearchEntriesRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ call.return_value = catalog.SearchEntriesResponse()
+ client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_search_entries_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.SearchEntriesRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
+ await client.search_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_search_entries_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.SearchEntriesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.search_entries(
+ name='name_value',
+ query='query_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+ arg = args[0].query
+ mock_val = 'query_value'
+ assert arg == mock_val
+
+
+def test_search_entries_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.search_entries(
+ catalog.SearchEntriesRequest(),
+ name='name_value',
+ query='query_value',
+ )
+
+@pytest.mark.asyncio
+async def test_search_entries_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.search_entries(
+ name='name_value',
+ query='query_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+ arg = args[0].query
+ mock_val = 'query_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_search_entries_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.search_entries(
+ catalog.SearchEntriesRequest(),
+ name='name_value',
+ query='query_value',
+ )
+
+
+def test_search_entries_pager(transport_name: str = "grpc"):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_entries),
+ '__call__') as call:
+ # Set the response to a series of pages.
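+ # Same four-page fixture shape as the list_entries pager tests, but with
+ # SearchEntriesResult items and a routing header keyed on 'name'.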
+ call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('name', ''), + )), + ) + pager = client.search_entries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.SearchEntriesResult) + for i in results) +def test_search_entries_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_entries_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
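+ # The async pager consumes the same four-page fixture via `async for`.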
+ call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.SearchEntriesResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_search_entries_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.CreateMetadataJobRequest, + dict, +]) +def test_create_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateMetadataJobRequest( + parent='parent_value', + metadata_job_id='metadata_job_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateMetadataJobRequest( + parent='parent_value', + metadata_job_id='metadata_job_id_value', + ) + +def test_create_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc + request = {} + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_metadata_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_metadata_job] = mock_rpc + + request = {} + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateMetadataJobRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_from_dict(): + await test_create_metadata_job_async(request_type=dict) + +def test_create_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
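+ # The routing header travels in the `metadata` keyword of the stub call.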
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_metadata_job_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_metadata_job(
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].metadata_job
+ mock_val = catalog.MetadataJob(name='name_value')
+ assert arg == mock_val
+ arg = args[0].metadata_job_id
+ mock_val = 'metadata_job_id_value'
+ assert arg == mock_val
+
+
+def test_create_metadata_job_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_metadata_job(
+ catalog.CreateMetadataJobRequest(),
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_metadata_job_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_metadata_job(
+ parent='parent_value',
+ metadata_job=catalog.MetadataJob(name='name_value'),
+ metadata_job_id='metadata_job_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].metadata_job
+ mock_val = catalog.MetadataJob(name='name_value')
+ assert arg == mock_val
+ arg = args[0].metadata_job_id
+ mock_val = 'metadata_job_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_metadata_job_flattened_error_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
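+ # As in the sync variant, the error is raised before any RPC is made.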
+ with pytest.raises(ValueError): + await client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent='parent_value', + metadata_job=catalog.MetadataJob(name='name_value'), + metadata_job_id='metadata_job_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetMetadataJobRequest, + dict, +]) +def test_get_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.MetadataJob( + name='name_value', + uid='uid_value', + type_=catalog.MetadataJob.Type.IMPORT, + ) + response = client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +def test_get_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetMetadataJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetMetadataJobRequest( + name='name_value', + ) + +def test_get_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc + request = {} + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_metadata_job(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_metadata_job in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_metadata_job] = mock_rpc
+
+ request = {}
+ await client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_metadata_job(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.GetMetadataJobRequest):
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob(
+ name='name_value',
+ uid='uid_value',
+ type_=catalog.MetadataJob.Type.IMPORT,
+ ))
+ response = await client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = catalog.GetMetadataJobRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.MetadataJob)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.type_ == catalog.MetadataJob.Type.IMPORT
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_async_from_dict():
+ await test_get_metadata_job_async(request_type=dict)
+
+def test_get_metadata_job_field_headers():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetMetadataJobRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_metadata_job),
+ '__call__') as call:
+ call.return_value = catalog.MetadataJob()
+ client.get_metadata_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.GetMetadataJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_metadata_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+        await client.get_metadata_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_metadata_job_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_metadata_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.MetadataJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_metadata_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_metadata_job_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_metadata_job(
+            catalog.GetMetadataJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_metadata_job_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_metadata_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_metadata_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
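+        # (The flattened keyword arguments are folded into a single
+        # GetMetadataJobRequest, which is what the stub receives.)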
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListMetadataJobsRequest, + dict, +]) +def test_list_metadata_jobs(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.ListMetadataJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_metadata_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListMetadataJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_metadata_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListMetadataJobsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_metadata_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_metadata_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc + request = {} + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_metadata_jobs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_metadata_jobs] = mock_rpc + + request = {} + await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async(transport: str = 'grpc_asyncio', request_type=catalog.ListMetadataJobsRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
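+        # (grpc_helpers_async.FakeUnaryUnaryCall wraps the message so the
+        # mocked stub can be awaited like a real unary-unary gRPC call.)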
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.ListMetadataJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMetadataJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_async_from_dict():
+    await test_list_metadata_jobs_async(request_type=dict)
+
+def test_list_metadata_jobs_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.ListMetadataJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_metadata_jobs),
+            '__call__') as call:
+        call.return_value = catalog.ListMetadataJobsResponse()
+        client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.ListMetadataJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_metadata_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
+        await client.list_metadata_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_metadata_jobs_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_metadata_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.ListMetadataJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_metadata_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_metadata_jobs_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_metadata_jobs(
+            catalog.ListMetadataJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_metadata_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_metadata_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_metadata_jobs_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_metadata_jobs(
+            catalog.ListMetadataJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_metadata_jobs_pager(transport_name: str = "grpc"):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_metadata_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
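+        # (The final page below carries no next_page_token, so iteration stops
+        # there; the trailing RuntimeError would only fire on an extra fetch.)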
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) + for i in results) +def test_list_metadata_jobs_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_metadata_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_metadata_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.MetadataJob) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_metadata_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + catalog.CancelMetadataJobRequest, + dict, +]) +def test_cancel_metadata_job(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
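+    # (Per AIP-4235, fields annotated as UUID4 request IDs are filled in
+    # client-side when left unset; the fields set explicitly below must
+    # survive unchanged in the request that reaches the stub.)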
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CancelMetadataJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.cancel_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CancelMetadataJobRequest( + name='name_value', + ) + +def test_cancel_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc + request = {} + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.cancel_metadata_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.cancel_metadata_job] = mock_rpc + + request = {} + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CancelMetadataJobRequest): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_from_dict(): + await test_cancel_metadata_job_async(request_type=dict) + +def test_cancel_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = None + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_cancel_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
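+    # (Patching __call__ on the multicallable's type intercepts the exact
+    # invocation the transport would otherwise send over the wire.)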
+    with mock.patch.object(
+            type(client.transport.cancel_metadata_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.cancel_metadata_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_cancel_metadata_job_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_metadata_job(
+            catalog.CancelMetadataJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.cancel_metadata_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_metadata_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_cancel_metadata_job_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_metadata_job(
+            catalog.CancelMetadataJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateEntryLinkRequest,
+    dict,
+])
+def test_create_entry_link(request_type, transport: str = 'grpc'):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.EntryLink(
+            name='name_value',
+            entry_link_type='entry_link_type_value',
+        )
+        response = client.create_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = catalog.CreateEntryLinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +def test_create_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryLinkRequest( + parent='parent_value', + entry_link_id='entry_link_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryLinkRequest( + parent='parent_value', + entry_link_id='entry_link_id_value', + ) + +def test_create_entry_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_link] = mock_rpc + request = {} + client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entry_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entry_link] = mock_rpc + + request = {} + await client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.create_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryLinkRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink(
+            name='name_value',
+            entry_link_type='entry_link_type_value',
+        ))
+        response = await client.create_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.CreateEntryLinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryLink)
+    assert response.name == 'name_value'
+    assert response.entry_link_type == 'entry_link_type_value'
+
+
+@pytest.mark.asyncio
+async def test_create_entry_link_async_from_dict():
+    await test_create_entry_link_async(request_type=dict)
+
+def test_create_entry_link_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.CreateEntryLinkRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        call.return_value = catalog.EntryLink()
+        client.create_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_entry_link_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.CreateEntryLinkRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+        await client.create_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_entry_link_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.EntryLink()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_entry_link(
+            parent='parent_value',
+            entry_link=catalog.EntryLink(name='name_value'),
+            entry_link_id='entry_link_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_link
+        mock_val = catalog.EntryLink(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_link_id
+        mock_val = 'entry_link_id_value'
+        assert arg == mock_val
+
+
+def test_create_entry_link_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entry_link(
+            catalog.CreateEntryLinkRequest(),
+            parent='parent_value',
+            entry_link=catalog.EntryLink(name='name_value'),
+            entry_link_id='entry_link_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_entry_link_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entry_link(
+            parent='parent_value',
+            entry_link=catalog.EntryLink(name='name_value'),
+            entry_link_id='entry_link_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entry_link
+        mock_val = catalog.EntryLink(name='name_value')
+        assert arg == mock_val
+        arg = args[0].entry_link_id
+        mock_val = 'entry_link_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entry_link_flattened_error_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
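+    # (Mixing a request object with flattened fields is ambiguous, so the
+    # client raises ValueError before any RPC is attempted.)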
+ with pytest.raises(ValueError): + await client.create_entry_link( + catalog.CreateEntryLinkRequest(), + parent='parent_value', + entry_link=catalog.EntryLink(name='name_value'), + entry_link_id='entry_link_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryLinkRequest, + dict, +]) +def test_delete_entry_link(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + ) + response = client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +def test_delete_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.DeleteEntryLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryLinkRequest( + name='name_value', + ) + +def test_delete_entry_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_link] = mock_rpc + request = {} + client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.delete_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_entry_link in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_entry_link] = mock_rpc
+
+        request = {}
+        await client.delete_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryLinkRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink(
+            name='name_value',
+            entry_link_type='entry_link_type_value',
+        ))
+        response = await client.delete_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.DeleteEntryLinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryLink)
+    assert response.name == 'name_value'
+    assert response.entry_link_type == 'entry_link_type_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_entry_link_async_from_dict():
+    await test_delete_entry_link_async(request_type=dict)
+
+def test_delete_entry_link_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.DeleteEntryLinkRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_link),
+            '__call__') as call:
+        call.return_value = catalog.EntryLink()
+        client.delete_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_entry_link_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.DeleteEntryLinkRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_link),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+        await client.delete_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_entry_link_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = catalog.EntryLink()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_entry_link(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_entry_link_flattened_error():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_entry_link(
+            catalog.DeleteEntryLinkRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_entry_link_flattened_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_entry_link(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryLinkRequest, + dict, +]) +def test_get_entry_link(request_type, transport: str = 'grpc'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + ) + response = client.get_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +def test_get_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryLinkRequest( + name='name_value', + ) + +def test_get_entry_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc
+        request = {}
+        client.get_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CatalogServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_entry_link in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_entry_link] = mock_rpc
+
+        request = {}
+        await client.get_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryLinkRequest):
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entry_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink(
+            name='name_value',
+            entry_link_type='entry_link_type_value',
+        ))
+        response = await client.get_entry_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = catalog.GetEntryLinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryLink)
+    assert response.name == 'name_value'
+    assert response.entry_link_type == 'entry_link_type_value'
+
+
+@pytest.mark.asyncio
+async def test_get_entry_link_async_from_dict():
+    await test_get_entry_link_async(request_type=dict)
+
+def test_get_entry_link_field_headers():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = catalog.GetEntryLinkRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_link),
+ '__call__') as call:
+ call.return_value = catalog.EntryLink()
+ client.get_entry_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_entry_link_field_headers_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog.GetEntryLinkRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_link),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+ await client.get_entry_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_entry_link_flattened():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_link),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.EntryLink()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_entry_link(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_entry_link_flattened_error():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_entry_link(
+ catalog.GetEntryLinkRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_entry_link_flattened_async():
+ client = CatalogServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_entry_link),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_entry_link(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
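+ # The flattened `name` kwarg should have been folded into a
+ # GetEntryLinkRequest by the client; the assertions below read it back
+ # from the request object that reached the (mocked) transport.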
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry_link( + catalog.GetEntryLinkRequest(), + name='name_value', + ) + + +def test_create_entry_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc + + request = {} + client.create_entry_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entry_type_rest_required_fields(request_type=catalog.CreateEntryTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["entry_type_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "entryTypeId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["entryTypeId"] = 'entry_type_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
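+ # (i.e. unset_fields may only contain the optional query parameters
+ # entry_type_id and validate_only; anything left over would be a path or
+ # body parameter leaking into the query string)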
+ assert not set(unset_fields) - set(("entry_type_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == 'entry_type_id_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_entry_type(request) + + expected_params = [ + ( + "entryTypeId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_entry_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("entryTypeId", "validateOnly", )) & set(("parent", "entryTypeId", "entryType", ))) + + +def test_create_entry_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
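+ # create_entry_type is a long-running operation, so the faked HTTP
+ # response carries a serialized longrunning Operation rather than an
+ # EntryType payload.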
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + entry_type=catalog.EntryType(name='name_value'), + entry_type_id='entry_type_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_entry_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1]) + + +def test_create_entry_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry_type( + catalog.CreateEntryTypeRequest(), + parent='parent_value', + entry_type=catalog.EntryType(name='name_value'), + entry_type_id='entry_type_id_value', + ) + + +def test_update_entry_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc + + request = {} + client.update_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entry_type_rest_required_fields(request_type=catalog.UpdateEntryTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_entry_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_entry_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("entryType", "updateMask", ))) + + +def test_update_entry_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
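+ # Patching the transport session's `request` method intercepts the
+ # outgoing HTTP call; the test then checks (via path_template.validate)
+ # that the flattened args were expanded into the PATCH URI for
+ # entry_type.name rather than sent as a request object.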
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_entry_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1]) + + +def test_update_entry_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_type( + catalog.UpdateEntryTypeRequest(), + entry_type=catalog.EntryType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_entry_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc + + request = {} + client.delete_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_entry_type_rest_required_fields(request_type=catalog.DeleteEntryTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
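+ # A rough sketch of what the mocked transcode() returns: a dict with
+ # 'uri', 'method' and 'query_params' keys, which is all the REST
+ # transport needs to build the request. Faking it sidesteps the real
+ # HTTP-rule matching, which would reject the placeholder field values.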
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_entry_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_entry_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_entry_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1]) + + +def test_delete_entry_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_entry_type( + catalog.DeleteEntryTypeRequest(), + name='name_value', + ) + + +def test_list_entry_types_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc + + request = {} + client.list_entry_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entry_types_rest_required_fields(request_type=catalog.ListEntryTypesRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_entry_types(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_entry_types_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_entry_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_entry_types_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_entry_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1]) + + +def test_list_entry_types_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entry_types( + catalog.ListEntryTypesRequest(), + parent='parent_value', + ) + + +def test_list_entry_types_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
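+ # The pager test below feeds a fixed sequence of serialized
+ # ListEntryTypesResponse pages through req.side_effect; iterating the
+ # returned pager is expected to issue one HTTP call per page and stitch
+ # the entry_types lists together (3 + 0 + 1 + 2 = 6 results).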
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token='abc', + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token='def', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token='ghi', + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_entry_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryType) + for i in results) + + pages = list(client.list_entry_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + + request = {} + client.get_entry_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_entry_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.EntryType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_entry_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_entry_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_entry_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
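+ # Unlike the create/update/delete methods above, get_entry_type returns
+ # the resource directly, so the faked response body is a serialized
+ # EntryType rather than a longrunning Operation.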
+ return_value = catalog.EntryType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_entry_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1]) + + +def test_get_entry_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_type( + catalog.GetEntryTypeRequest(), + name='name_value', + ) + + +def test_create_aspect_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc + + request = {} + client.create_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_aspect_type_rest_required_fields(request_type=catalog.CreateAspectTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["aspect_type_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "aspectTypeId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_aspect_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["aspectTypeId"] = 'aspect_type_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_aspect_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("aspect_type_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == 'aspect_type_id_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_aspect_type(request) + + expected_params = [ + ( + "aspectTypeId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_aspect_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("aspectTypeId", "validateOnly", )) & set(("parent", "aspectTypeId", "aspectType", ))) + + +def test_create_aspect_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_aspect_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1]) + + +def test_create_aspect_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
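+ # No HTTP mock is needed here: mixing a request object with flattened
+ # kwargs is expected to be rejected client-side with a ValueError before
+ # any request is built.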
+ with pytest.raises(ValueError): + client.create_aspect_type( + catalog.CreateAspectTypeRequest(), + parent='parent_value', + aspect_type=catalog.AspectType(name='name_value'), + aspect_type_id='aspect_type_id_value', + ) + + +def test_update_aspect_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc + + request = {} + client.update_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_aspect_type_rest_required_fields(request_type=catalog.UpdateAspectTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_aspect_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_aspect_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_aspect_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_aspect_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("aspectType", "updateMask", ))) + + +def test_update_aspect_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_aspect_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1]) + + +def test_update_aspect_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc + + request = {} + client.delete_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_aspect_type_rest_required_fields(request_type=catalog.DeleteAspectTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_aspect_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_aspect_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_aspect_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_aspect_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_aspect_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_aspect_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1]) + + +def test_delete_aspect_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_aspect_type( + catalog.DeleteAspectTypeRequest(), + name='name_value', + ) + + +def test_list_aspect_types_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_aspect_types in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc + + request = {} + client.list_aspect_types(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_aspect_types(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_aspect_types_rest_required_fields(request_type=catalog.ListAspectTypesRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_aspect_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_aspect_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.ListAspectTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_aspect_types(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_aspect_types_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_aspect_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_aspect_types_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListAspectTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_aspect_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1]) + + +def test_list_aspect_types_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_aspect_types( + catalog.ListAspectTypesRequest(), + parent='parent_value', + ) + + +def test_list_aspect_types_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
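+ # The pager test below fakes pagination purely at the HTTP layer: four
+ # JSON-encoded ListAspectTypesResponse pages are queued on
+ # req.side_effect, chained by next_page_token values 'abc' -> 'def' ->
+ # 'ghi' -> '' (an empty token ends iteration), so the pager should yield
+ # 3 + 0 + 1 + 2 = 6 AspectType items.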
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token='abc', + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token='def', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token='ghi', + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_aspect_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.AspectType) + for i in results) + + pages = list(client.list_aspect_types(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_aspect_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_aspect_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + + request = {} + client.get_aspect_type(request) + + # Establish that the underlying gRPC stub method was called. 
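+ # Conceptually, the call above resolved through the cache rather than a
+ # freshly wrapped rpc, roughly:
+ #
+ #     rpc = client._transport._wrapped_methods[client._transport.get_aspect_type]
+ #     rpc(request)  # lands on the mock_rpc installed above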
+ assert mock_rpc.call_count == 1 + + client.get_aspect_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_aspect_type_rest_required_fields(request_type=catalog.GetAspectTypeRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_aspect_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_aspect_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.AspectType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_aspect_type(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_aspect_type_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_aspect_type_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
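+ # catalog.AspectType is a proto-plus message: the empty instance created
+ # below is converted with AspectType.pb(...), serialized through
+ # json_format.MessageToJson, and planted on Response._content so the
+ # mocked Session.request looks like a genuine REST reply.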
+ return_value = catalog.AspectType() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_aspect_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1]) + + +def test_get_aspect_type_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_aspect_type( + catalog.GetAspectTypeRequest(), + name='name_value', + ) + + +def test_create_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc + + request = {} + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
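+ # create_entry_group is a long-running-operation method, so its first
+ # invocation may also build and cache an operations wrapper (unlike the
+ # plain unary methods above); the reset_mock() below accounts for that
+ # before verifying that the second call creates no new wrapper.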
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entry_group_rest_required_fields(request_type=catalog.CreateEntryGroupRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["entry_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "entryGroupId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == request_init["entry_group_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["entryGroupId"] = 'entry_group_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("entry_group_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == 'entry_group_id_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
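+ # Because entry_group_id was seeded as "" (the proto default),
+ # MessageToJson drops it and _get_unset_required_fields must add it
+ # back; that is why expected_params further down includes the pair
+ # ("entryGroupId", "") alongside the standard $alt parameter.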
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_entry_group(request) + + expected_params = [ + ( + "entryGroupId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_entry_group_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("entryGroupId", "validateOnly", )) & set(("parent", "entryGroupId", "entryGroup", ))) + + +def test_create_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + entry_group=catalog.EntryGroup(name='name_value'), + entry_group_id='entry_group_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_entry_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1]) + + +def test_create_entry_group_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
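+ # A minimal sketch of the rule exercised here, assuming the generated
+ # client's usual flattening guard (names as in this test):
+ #
+ #     if request is not None and any([parent, entry_group, entry_group_id]):
+ #         raise ValueError("If the `request` argument is set, then none of "
+ #                          "the individual field arguments should be set.")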
+ with pytest.raises(ValueError): + client.create_entry_group( + catalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group=catalog.EntryGroup(name='name_value'), + entry_group_id='entry_group_id_value', + ) + + +def test_update_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc + + request = {} + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entry_group_rest_required_fields(request_type=catalog.UpdateEntryGroupRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
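+ # UpdateEntryGroup needs no seeded string fields above (request_init
+ # stays empty): the required entry_group message travels in the request
+ # body, and only update_mask and validate_only may be re-added as query
+ # parameters.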
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_entry_group(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_entry_group_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("entryGroup", "updateMask", ))) + + +def test_update_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_entry_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1]) + + +def test_update_entry_group_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_entry_group( + catalog.UpdateEntryGroupRequest(), + entry_group=catalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc + + request = {} + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_entry_group_rest_required_fields(request_type=catalog.DeleteEntryGroupRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
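+ # Contrast with the create/update cases above: a DELETE http rule has
+ # no request body, so transcode_result below carries no 'body' key and
+ # every surviving field (here just the optional etag) must travel as a
+ # query parameter.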
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_entry_group(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_entry_group_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1]) + + +def test_delete_entry_group_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_entry_group( + catalog.DeleteEntryGroupRequest(), + name='name_value', + ) + + +def test_list_entry_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entry_groups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc + + request = {} + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entry_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entry_groups_rest_required_fields(request_type=catalog.ListEntryGroupsRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_entry_groups(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_entry_groups_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_entry_groups._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_entry_groups_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListEntryGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_entry_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1]) + + +def test_list_entry_groups_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entry_groups( + catalog.ListEntryGroupsRequest(), + parent='parent_value', + ) + + +def test_list_entry_groups_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
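+ # "Two responses for two calls" below means the four-page sequence is
+ # queued twice on req.side_effect: the first pass is consumed by
+ # list(pager), the second by the .pages iteration at the end, since each
+ # full iteration re-issues all four HTTP requests.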
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token='abc', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token='ghi', + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_entry_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryGroup) + for i in results) + + pages = list(client.list_entry_groups(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + + request = {} + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_entry_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entry_group_rest_required_fields(request_type=catalog.GetEntryGroupRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.EntryGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_entry_group(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_entry_group_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = catalog.EntryGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_entry_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1]) + + +def test_get_entry_group_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_group( + catalog.GetEntryGroupRequest(), + name='name_value', + ) + + +def test_create_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + + request = {} + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["entry_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "entryId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == request_init["entry_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["entryId"] = 'entry_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("entry_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == 'entry_id_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_entry(request) + + expected_params = [ + ( + "entryId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_entry_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(("entryId", )) & set(("parent", "entryId", "entry", ))) + + +def test_create_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + entry=catalog.Entry(name='name_value'), + entry_id='entry_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1]) + + +def test_create_entry_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_entry( + catalog.CreateEntryRequest(), + parent='parent_value', + entry=catalog.Entry(name='name_value'), + entry_id='entry_id_value', + ) + + +def test_update_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + + request = {} + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "aspect_keys", "delete_missing_aspects", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
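+ # For this PATCH rule the Entry payload goes in the request body (note
+ # transcode_result['body'] below), while allow_missing, aspect_keys,
+ # delete_missing_aspects and update_mask ride along as optional query
+ # parameters, matching the unset-required-fields check further down.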
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_entry(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_entry_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "aspectKeys", "deleteMissingAspects", "updateMask", )) & set(("entry", ))) + + +def test_update_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) + + +def test_update_entry_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_entry( + catalog.UpdateEntryRequest(), + entry=catalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc + + request = {} + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
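+ # Unlike delete_entry_group above, which resolves to a long-running
+ # Operation, DeleteEntry returns the deleted catalog.Entry directly, so
+ # the fake response is built from catalog.Entry.pb rather than
+ # operations_pb2.Operation.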
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_entry(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_entry_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) + + +def test_delete_entry_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_entry( + catalog.DeleteEntryRequest(), + name='name_value', + ) + + +def test_list_entries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc + + request = {} + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
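+ # With transcode stubbed out and no optional fields set, the only
+ # query parameter the REST layer is expected to add on its own is
+ # the system parameter $alt=json;enum-encoding=int, which requests
+ # JSON responses with enums encoded as integers; the expected_params
+ # assertion below relies on exactly that.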
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_entries(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_entries_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_entries._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_entries_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1]) + + +def test_list_entries_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entries( + catalog.ListEntriesRequest(), + parent='parent_value', + ) + + +def test_list_entries_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
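+ # The pager consumes four pages (3 + 0 + 1 + 2 entries) twice: once
+ # via list(pager), which flattens items across pages, and once via
+ # .pages, which yields each raw ListEntriesResponse. Every page is a
+ # separate HTTP call served in order from req.side_effect, and
+ # iteration stops once next_page_token comes back empty.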
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), + ], + next_page_token='abc', + ), + catalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + ], + next_page_token='ghi', + ), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + pager = client.list_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.Entry) + for i in results) + + pages = list(client.list_entries(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + + request = {} + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
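+ # For this body-less GET, anything _get_unset_required_fields()
+ # reports must be a field that legitimately travels in the query
+ # string (aspect_types, paths, view); a path-bound field such as
+ # name appearing here would mean path and query parameters are
+ # being conflated.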
+ assert not set(unset_fields) - set(("aspect_types", "paths", "view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_entry(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_entry_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(("aspectTypes", "paths", "view", )) & set(("name", ))) + + +def test_get_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
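+ # path_template.validate() checks the recorded URI against the
+ # http-rule pattern; the trailing ** segment matches the
+ # multi-segment entry id, whereas * would match exactly one path
+ # segment.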
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) + + +def test_get_entry_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry( + catalog.GetEntryRequest(), + name='name_value', + ) + + +def test_lookup_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.lookup_entry in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + + request = {} + client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.lookup_entry(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["entry"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "entry" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entry" in jsonified_request + assert jsonified_request["entry"] == request_init["entry"] + + jsonified_request["name"] = 'name_value' + jsonified_request["entry"] = 'entry_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("aspect_types", "entry", "paths", "view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "entry" in jsonified_request + assert jsonified_request["entry"] == 'entry_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
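+ # LookupEntry is unusual in this file: entry is required but not
+ # path-bound, so it was pre-seeded above with its proto3 default ""
+ # and must later surface in expected_params next to the $alt system
+ # parameter.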
+ return_value = catalog.Entry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.lookup_entry(request) + + expected_params = [ + ( + "entry", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_lookup_entry_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.lookup_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(("aspectTypes", "entry", "paths", "view", )) & set(("name", "entry", ))) + + +def test_search_entries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + + request = {} + client.search_entries(request) + + # Establish that the underlying gRPC stub method was called. 
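+ # "gRPC stub" in the comment above is carried over from the gRPC
+ # variant of this test; the mechanism is identical for REST:
+ # _prep_wrapped_messages() built the wrapper once at client
+ # construction, so both calls hit the same _wrapped_methods entry
+ # and wrap_method is never invoked again.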
+ assert mock_rpc.call_count == 1 + + client.search_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["name"] = 'name_value' + jsonified_request["query"] = 'query_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_entries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("order_by", "page_size", "page_token", "query", "scope", "semantic_search", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "query" in jsonified_request + assert jsonified_request["query"] == 'query_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.SearchEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
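+ # SearchEntries maps to a POST custom method (:searchEntries), but
+ # the stub below still routes every field through query_params;
+ # query is required yet not path-bound, so it is expected to show
+ # up as a query parameter with its default "" value.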
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.SearchEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.search_entries(request) + + expected_params = [ + ( + "query", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_entries_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_entries._get_unset_required_fields({}) + assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", "query", "scope", "semanticSearch", )) & set(("name", "query", ))) + + +def test_search_entries_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.SearchEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + query='query_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.SearchEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.search_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*}:searchEntries" % client.transport._host, args[1]) + + +def test_search_entries_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_entries( + catalog.SearchEntriesRequest(), + name='name_value', + query='query_value', + ) + + +def test_search_entries_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + next_page_token='abc', + ), + catalog.SearchEntriesResponse( + results=[], + next_page_token='def', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + ], + next_page_token='ghi', + ), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'name': 'projects/sample1/locations/sample2'} + + pager = client.search_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.SearchEntriesResult) + for i in results) + + pages = list(client.search_entries(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc + + request = {} + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
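+ # Setting mock_rpc.return_value.name to a real string above is a
+ # precaution for long-running methods like CreateMetadataJob: some
+ # LRO plumbing (notably in the compute clients, per the inline
+ # comment) reads .name off the returned operation and expects a str
+ # rather than a Mock attribute.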
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_metadata_job_rest_required_fields(request_type=catalog.CreateMetadataJobRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_metadata_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("metadata_job_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
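+ # Unlike the proto-plus catalog messages used elsewhere in this
+ # file, operations_pb2.Operation is a plain protobuf message, so
+ # the fake response below is passed to MessageToJson directly and
+ # no "Convert return value to protobuf type" step is needed.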
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_metadata_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("metadataJobId", "validateOnly", )) & set(("parent", "metadataJob", ))) + + +def test_create_metadata_job_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + metadata_job=catalog.MetadataJob(name='name_value'), + metadata_job_id='metadata_job_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_metadata_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1]) + + +def test_create_metadata_job_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent='parent_value', + metadata_job=catalog.MetadataJob(name='name_value'), + metadata_job_id='metadata_job_id_value', + ) + + +def test_get_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc + + request = {} + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_metadata_job_rest_required_fields(request_type=catalog.GetMetadataJobRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_metadata_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_metadata_job_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_metadata_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/metadataJobs/*}" % client.transport._host, args[1]) + + +def test_get_metadata_job_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name='name_value', + ) + + +def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_metadata_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc + + request = {} + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_metadata_jobs_rest_required_fields(request_type=catalog.ListMetadataJobsRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_metadata_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_metadata_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_metadata_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_metadata_jobs_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_metadata_jobs_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_metadata_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1]) + + +def test_list_metadata_jobs_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_metadata_jobs( + catalog.ListMetadataJobsRequest(), + parent='parent_value', + ) + + +def test_list_metadata_jobs_rest_pager(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token='abc', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token='def', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token='ghi', + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_metadata_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) + for i in results) + + pages = list(client.list_metadata_jobs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc + + request = {} + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_metadata_job_rest_required_fields(request_type=catalog.CancelMetadataJobRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_metadata_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_cancel_metadata_job_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
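+ # CancelMetadataJob returns google.protobuf.Empty, so there is no
+ # payload to fake: the designated return value is None and the
+ # HTTP response body below is an empty JSON string.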
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.cancel_metadata_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" % client.transport._host, args[1]) + + +def test_cancel_metadata_job_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_metadata_job( + catalog.CancelMetadataJobRequest(), + name='name_value', + ) + + +def test_create_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entry_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entry_link] = mock_rpc + + request = {} + client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entry_link_rest_required_fields(request_type=catalog.CreateEntryLinkRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["entry_link_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "entryLinkId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == request_init["entry_link_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["entryLinkId"] = 'entry_link_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("entry_link_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == 'entry_link_id_value' + + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
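+            # entry_link_id is required but defaults to "", so it is expected
+            # to surface in the query params even when empty (see
+            # expected_params below).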
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_entry_link(request) + + expected_params = [ + ( + "entryLinkId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(("entryLinkId", )) & set(("parent", "entryLinkId", "entryLink", ))) + + +def test_create_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + entry_link=catalog.EntryLink(name='name_value'), + entry_link_id='entry_link_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks" % client.transport._host, args[1]) + + +def test_create_entry_link_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
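+    # (request= and flattened kwargs are alternative ways of building the
+    # same message, so the client refuses to combine them)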
+    with pytest.raises(ValueError):
+        client.create_entry_link(
+            catalog.CreateEntryLinkRequest(),
+            parent='parent_value',
+            entry_link=catalog.EntryLink(name='name_value'),
+            entry_link_id='entry_link_id_value',
+        )
+
+
+def test_delete_entry_link_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_entry_link in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.delete_entry_link] = mock_rpc
+
+        request = {}
+        client.delete_entry_link(request)
+
+        # Establish that the underlying wrapped method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_entry_link_rest_required_fields(request_type=catalog.DeleteEntryLinkRequest):
+    transport_class = transports.CatalogServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_link._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_link._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = catalog.EntryLink()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
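+            # DeleteEntryLink transcodes to HTTP DELETE, so the transcode
+            # result below carries no request body.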
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_entry_link(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" % client.transport._host, args[1]) + + +def test_delete_entry_link_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_entry_link(
+            catalog.DeleteEntryLinkRequest(),
+            name='name_value',
+        )
+
+
+def test_get_entry_link_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_entry_link in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc
+
+        request = {}
+        client.get_entry_link(request)
+
+        # Establish that the underlying wrapped method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_entry_link(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_entry_link_rest_required_fields(request_type=catalog.GetEntryLinkRequest):
+    transport_class = transports.CatalogServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_link._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_link._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = catalog.EntryLink()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_entry_link(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" % client.transport._host, args[1]) + + +def test_get_entry_link_rest_flattened_error(transport: str = 'rest'): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_link( + catalog.GetEntryLinkRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
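+    # (an explicit transport instance already carries its own credentials,
+    # so competing credential sources are rejected with ValueError)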
+ transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CatalogServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CatalogServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + transports.CatalogServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = CatalogServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. 
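+        # Even with request=None, the client synthesizes a default
+        # CreateEntryTypeRequest rather than forwarding None to the stub.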
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + call.return_value = catalog.ListEntryTypesResponse() + client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + call.return_value = catalog.EntryType() + client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
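+    # Patching __call__ on the stub's type intercepts the RPC before any
+    # channel I/O can occur.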
+ with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_aspect_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + call.return_value = catalog.ListAspectTypesResponse() + client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + call.return_value = catalog.AspectType() + client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
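+# For operation-returning methods such as create_entry_group, the stub is
+# faked with a bare operations_pb2.Operation; only the request message is
+# asserted.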
+def test_create_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_groups_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = catalog.ListEntryGroupsResponse() + client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = catalog.EntryGroup() + client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.get_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + call.return_value = catalog.Entry() + client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + call.return_value = catalog.SearchEntriesResponse() + client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), + '__call__') as call: + call.return_value = catalog.EntryLink() + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), + '__call__') as call: + call.return_value = catalog.EntryLink() + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_link), + '__call__') as call: + call.return_value = catalog.EntryLink() + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_entry_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + type_aliases=['type_aliases_value'], + platform='platform_value', + system='system_value', + )) + await client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
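+        # FakeUnaryUnaryCall wraps the faked operation so the mocked async
+        # stub can be awaited like a real grpc.aio unary-unary call.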
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_aspect_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + )) + await client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_groups_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + )) + await client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lookup_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + )) + await client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob( + name='name_value', + uid='uid_value', + type_=catalog.MetadataJob.Type.IMPORT, + )) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
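+# Note: for the operation-returning methods in this block (create_metadata_job
+# above, and the entry-group mutations earlier), the fake stub yields a raw
+# operations_pb2.Operation rather than a proto-plus result type, since that is
+# what the transport layer returns before the client wraps it for polling.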
+@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + )) + await client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + )) + await client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + )) + await client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CatalogServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_entry_type_rest_bad_request(request_type=catalog.CreateEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.CreateEntryTypeRequest, + dict, +]) +def test_create_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["entry_type"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'type_aliases': ['type_aliases_value1', 'type_aliases_value2'], 'platform': 'platform_value', 'system': 'system_value', 'required_aspects': [{'type_': 'type__value'}], 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry_type(request) + + # Establish that the response is the type that we expect. 
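+    # For operation-returning REST methods the generated check stops at
+    # re-serializing the Operation below; there is no assertion on it, so
+    # this test effectively verifies only that a well-formed Operation came
+    # back. The interceptor test that follows exercises the pre/post hooks
+    # around the same call.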
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.CreateEntryTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_entry_type_rest_bad_request(request_type=catalog.UpdateEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
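+    # Hedged aside: a mocked HTTP 400 is all it takes here, because
+    # google.api_core maps HTTP status codes to exception classes (400 ->
+    # core_exceptions.BadRequest), and pytest.raises asserts that the client
+    # surfaces that mapped exception rather than a raw requests error.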
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entry_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryTypeRequest, + dict, +]) +def test_update_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} + request_init["entry_type"] = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'type_aliases': ['type_aliases_value1', 'type_aliases_value2'], 'platform': 'platform_value', 'system': 'system_value', 'required_aspects': [{'type_': 'type__value'}], 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
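+        # Background (inferred from the checks below): proto-plus message
+        # classes expose their fields via `meta.fields`, while vanilla
+        # protobuf classes expose `DESCRIPTOR.fields`; the absence of a
+        # DESCRIPTOR attribute is therefore used as the duck-typing test for
+        # "this is a proto-plus type".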
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entry_type(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.UpdateEntryTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_type_rest_bad_request(request_type=catalog.DeleteEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryTypeRequest, + dict, +]) +def test_delete_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
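+    # Note the two patching styles in this file: the bad-request tests patch
+    # requests' Session.request at the class level, while the success tests
+    # (like the block below) patch the request method on the type of the
+    # client's own transport session, which is typically google-auth's
+    # AuthorizedSession wrapping requests underneath.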
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry_type(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.DeleteEntryTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entry_types(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListEntryTypesRequest, + dict, +]) +def test_list_entry_types_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entry_types(request) + + # Establish that the response is the type that we expect. 
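+    # Illustrative note: list methods return a pager rather than the raw
+    # response; the pager proxies attribute access to the wrapped
+    # ListEntryTypesResponse (hence the field asserts below) and, if
+    # iterated, would page through results via next_page_token, e.g.:
+    #
+    #   for entry_type in response:  # may trigger further (mocked) requests
+    #       ...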
+ assert isinstance(response, pagers.ListEntryTypesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entry_types_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_types") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entry_types") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListEntryTypesResponse.to_json(catalog.ListEntryTypesResponse()) + req.return_value.content = return_value + + request = catalog.ListEntryTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListEntryTypesResponse() + post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata + + client.list_entry_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryTypeRequest, + dict, +]) +def test_get_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
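+    # Worth noting for the block below: json_format.MessageToJson operates on
+    # raw protobuf messages, so the proto-plus return value is first unwrapped
+    # with catalog.EntryType.pb(...) before being serialized into the fake
+    # HTTP response body.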
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.EntryType(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            type_aliases=['type_aliases_value'],
+            platform='platform_value',
+            system='system_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = catalog.EntryType.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_entry_type(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryType)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.type_aliases == ['type_aliases_value']
+    assert response.platform == 'platform_value'
+    assert response.system == 'system_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_entry_type_rest_interceptors(null_interceptor):
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(),
+        )
+    client = CatalogServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_type") as post, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_type") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = catalog.EntryType.to_json(catalog.EntryType())
+        req.return_value.content = return_value
+
+        request = catalog.GetEntryTypeRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = catalog.EntryType()
+        post_with_metadata.return_value = catalog.EntryType(), metadata
+
+        client.get_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_aspect_type_rest_bad_request(request_type=catalog.CreateAspectTypeRequest):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_aspect_type(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateAspectTypeRequest,
+    dict,
+])
+def test_create_aspect_type_rest_call_success(request_type):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["aspect_type"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}, 'metadata_template': {'index': 536, 'name': 'name_value', 'type_': 'type__value', 'record_fields': {}, 'enum_values': [{'index': 536, 'name': 'name_value', 'deprecated': 'deprecated_value'}], 'map_items': {}, 'array_items': {}, 'type_id': 'type_id_value', 'type_ref': 'type_ref_value', 'constraints': {'required': True}, 'annotations': {'deprecated': 'deprecated_value', 'display_name': 'display_name_value', 'description': 'description_value', 'display_order': 1393, 'string_type': 'string_type_value', 'string_values': ['string_values_value1', 'string_values_value2']}}, 'transfer_status': 1}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] + else: + del request_init["aspect_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_aspect_type(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_aspect_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_aspect_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_aspect_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_aspect_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateAspectTypeRequest.pb(catalog.CreateAspectTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.CreateAspectTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_aspect_type_rest_bad_request(request_type=catalog.UpdateAspectTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_aspect_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateAspectTypeRequest, + dict, +]) +def test_update_aspect_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}} + request_init["aspect_type"] = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}, 'metadata_template': {'index': 536, 'name': 'name_value', 'type_': 'type__value', 'record_fields': {}, 'enum_values': [{'index': 536, 'name': 'name_value', 'deprecated': 'deprecated_value'}], 'map_items': {}, 'array_items': {}, 'type_id': 'type_id_value', 'type_ref': 'type_ref_value', 'constraints': {'required': True}, 'annotations': {'deprecated': 'deprecated_value', 'display_name': 'display_name_value', 'description': 'description_value', 'display_order': 1393, 'string_type': 'string_type_value', 'string_values': ['string_values_value1', 'string_values_value2']}}, 'transfer_status': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] + else: + del request_init["aspect_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_aspect_type(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_aspect_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_aspect_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_aspect_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_aspect_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateAspectTypeRequest.pb(catalog.UpdateAspectTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.UpdateAspectTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_aspect_type_rest_bad_request(request_type=catalog.DeleteAspectTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_aspect_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteAspectTypeRequest, + dict, +]) +def test_delete_aspect_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_aspect_type(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_aspect_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_aspect_type") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_aspect_type_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteAspectTypeRequest.pb(catalog.DeleteAspectTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.DeleteAspectTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_aspect_types_rest_bad_request(request_type=catalog.ListAspectTypesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_aspect_types(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListAspectTypesRequest, + dict, +]) +def test_list_aspect_types_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListAspectTypesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_aspect_types(request) + + # Establish that the response is the type that we expect. 
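+    # The client wraps the raw ListAspectTypesResponse in a ListAspectTypesPager;
+    # the pager proxies the response fields, so the assertions below read
+    # next_page_token and unreachable_locations straight through it.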
+ assert isinstance(response, pagers.ListAspectTypesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_aspect_types_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_aspect_types") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_aspect_types") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListAspectTypesResponse.to_json(catalog.ListAspectTypesResponse()) + req.return_value.content = return_value + + request = catalog.ListAspectTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListAspectTypesResponse() + post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata + + client.list_aspect_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_aspect_type(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetAspectTypeRequest, + dict, +]) +def test_get_aspect_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
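+    # get_aspect_type is a plain unary call (no LRO): the mock payload is a
+    # proto-plus AspectType converted to its protobuf form via .pb() and then
+    # serialized with json_format.MessageToJson, mirroring the real wire format.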
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.AspectType(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = catalog.AspectType.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_aspect_type(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.AspectType)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_aspect_type_rest_interceptors(null_interceptor):
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(),
+    )
+    client = CatalogServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_aspect_type") as post, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_aspect_type") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = catalog.AspectType.to_json(catalog.AspectType())
+        req.return_value.content = return_value
+
+        request = catalog.GetAspectTypeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = catalog.AspectType()
+        post_with_metadata.return_value = catalog.AspectType(), metadata
+
+        client.get_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_entry_group_rest_bad_request(request_type=catalog.CreateEntryGroupRequest):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_entry_group(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateEntryGroupRequest,
+    dict,
+])
+def test_create_entry_group_rest_call_success(request_type):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["entry_group"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'transfer_status': 1}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else:  # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["entry_group"].items():  # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["entry_group"][field])):
+                    del request_init["entry_group"][field][i][subfield]
+            else:
+                del request_init["entry_group"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_entry_group(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_entry_group_rest_interceptors(null_interceptor):
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(),
+    )
+    client = CatalogServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_group") as post, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_group_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_group") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = catalog.CreateEntryGroupRequest.pb(catalog.CreateEntryGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = catalog.CreateEntryGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_entry_group_rest_bad_request(request_type=catalog.UpdateEntryGroupRequest):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entry_group(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryGroupRequest, + dict, +]) +def test_update_entry_group_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}} + request_init["entry_group"] = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'transfer_status': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_group"][field])): + del request_init["entry_group"][field][i][subfield] + else: + del request_init["entry_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entry_group(request) + + # Establish that the response is the type that we expect. 
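+    # (update_entry_group returns a long-running operation future, so there are
+    # no field-level assertions here; the mocked Operation is only re-serialized.)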
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entry_group_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_group") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_group_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry_group") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateEntryGroupRequest.pb(catalog.UpdateEntryGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.UpdateEntryGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_group_rest_bad_request(request_type=catalog.DeleteEntryGroupRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry_group(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryGroupRequest, + dict, +]) +def test_delete_entry_group_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_group_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_group") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_group_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_group") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryGroupRequest.pb(catalog.DeleteEntryGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.DeleteEntryGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entry_groups_rest_bad_request(request_type=catalog.ListEntryGroupsRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entry_groups(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListEntryGroupsRequest, + dict, +]) +def test_list_entry_groups_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entry_groups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEntryGroupsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entry_groups_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_groups") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entry_groups") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListEntryGroupsResponse.to_json(catalog.ListEntryGroupsResponse()) + req.return_value.content = return_value + + request = catalog.ListEntryGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListEntryGroupsResponse() + post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata + + client.list_entry_groups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_group(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryGroupRequest, + dict, +]) +def test_get_entry_group_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.EntryGroup(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = catalog.EntryGroup.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_entry_group(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, catalog.EntryGroup)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_entry_group_rest_interceptors(null_interceptor):
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(),
+    )
+    client = CatalogServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_group") as post, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_group") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = catalog.EntryGroup.to_json(catalog.EntryGroup())
+        req.return_value.content = return_value
+
+        request = catalog.GetEntryGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = catalog.EntryGroup()
+        post_with_metadata.return_value = catalog.EntryGroup(), metadata
+
+        client.get_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_entry(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    catalog.CreateEntryRequest,
+    dict,
+])
+def test_create_entry_rest_call_success(request_type):
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'}
+    request_init["entry"] = {'name': 'name_value', 'entry_type': 'entry_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'aspects': {}, 'parent_entry': 'parent_entry_value', 'fully_qualified_name': 'fully_qualified_name_value', 'entry_source': {'resource': 'resource_value', 'system': 'system_value', 'platform': 'platform_value', 'display_name': 'display_name_value', 'description': 'description_value', 'labels': {}, 'ancestors': [{'name': 'name_value', 'type_': 'type__value'}], 'create_time': {}, 'update_time': {}, 'location': 'location_value'}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = catalog.CreateEntryRequest.meta.fields["entry"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
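+        # Illustrative example: for Entry's `entry_source` field this returns
+        # the fields of the EntrySource message, so nested sample keys such as
+        # 'ancestors' above can be checked against the runtime descriptor.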
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] + else: + del request_init["entry"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entry_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.Entry.to_json(catalog.Entry()) + req.return_value.content = return_value + + request = catalog.CreateEntryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata + + client.create_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entry(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.UpdateEntryRequest, + dict, +]) +def test_update_entry_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} + request_init["entry"] = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4', 'entry_type': 'entry_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'aspects': {}, 'parent_entry': 'parent_entry_value', 'fully_qualified_name': 'fully_qualified_name_value', 'entry_source': {'resource': 'resource_value', 'system': 'system_value', 'platform': 'platform_value', 'display_name': 'display_name_value', 'description': 'description_value', 'labels': {}, 'ancestors': [{'name': 'name_value', 'type_': 'type__value'}], 'create_time': {}, 'update_time': {}, 'location': 'location_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryRequest.meta.fields["entry"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] + else: + del request_init["entry"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entry(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entry_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.Entry.to_json(catalog.Entry()) + req.return_value.content = return_value + + request = catalog.UpdateEntryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata + + client.update_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryRequest, + dict, +]) +def test_delete_entry_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
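+    # Unlike the aspect-type and entry-group deletions above, delete_entry is
+    # not an LRO: the service returns the deleted Entry itself, so the mock
+    # payload below is a serialized catalog.Entry.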
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.Entry.to_json(catalog.Entry()) + req.return_value.content = return_value + + request = catalog.DeleteEntryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata + + client.delete_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entries(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListEntriesRequest, + dict, +]) +def test_list_entries_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entries(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entries_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entries") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entries") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListEntriesResponse.to_json(catalog.ListEntriesResponse()) + req.return_value.content = return_value + + request = catalog.ListEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListEntriesResponse() + post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata +
+ client.list_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryRequest, + dict, +]) +def test_get_entry_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_entry(request) + + # Establish that the response is the type that we expect.
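+ # The mocked JSON payload built above should deserialize back into an Entry carrying the same scalar fields.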
+ assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entry_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.Entry.to_json(catalog.Entry()) + req.return_value.content = return_value + + request = catalog.GetEntryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata + + client.get_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.lookup_entry(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.LookupEntryRequest, + dict, +]) +def test_lookup_entry_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
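+ # LookupEntry resolves an Entry rather than returning a dedicated response type, so the same Entry fixture used for get_entry applies here.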
+ return_value = catalog.Entry( + name='name_value', + entry_type='entry_type_value', + parent_entry='parent_entry_value', + fully_qualified_name='fully_qualified_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.lookup_entry(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.Entry) + assert response.name == 'name_value' + assert response.entry_type == 'entry_type_value' + assert response.parent_entry == 'parent_entry_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_entry_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_lookup_entry") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_lookup_entry") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.Entry.to_json(catalog.Entry()) + req.return_value.content = return_value + + request = catalog.LookupEntryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata + + client.lookup_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
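+ # google.api_core maps an HTTP 400 response to core_exceptions.BadRequest, which pytest.raises captures below.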
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_entries(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.SearchEntriesRequest, + dict, +]) +def test_search_entries_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.SearchEntriesResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.SearchEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_entries(request) + + # Establish that the response is the type that we expect. 
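+ # search_entries wraps the raw SearchEntriesResponse in a SearchEntriesPager; the pager proxies the response fields asserted below.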
+ assert isinstance(response, pagers.SearchEntriesPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_entries_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_search_entries") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_search_entries") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.SearchEntriesResponse.to_json(catalog.SearchEntriesResponse()) + req.return_value.content = return_value + + request = catalog.SearchEntriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.SearchEntriesResponse() + post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata + + client.search_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_metadata_job_rest_bad_request(request_type=catalog.CreateMetadataJobRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_metadata_job(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.CreateMetadataJobRequest, + dict, +]) +def test_create_metadata_job_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["metadata_job"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'type_': 1, 'import_spec': {'source_storage_uri': 'source_storage_uri_value', 'source_create_time': {}, 'scope': {'entry_groups': ['entry_groups_value1', 'entry_groups_value2'], 'entry_types': ['entry_types_value1', 'entry_types_value2'], 'aspect_types': ['aspect_types_value1', 'aspect_types_value2'], 'glossaries': ['glossaries_value1', 'glossaries_value2'], 'entry_link_types': ['entry_link_types_value1', 'entry_link_types_value2'], 'referenced_entry_scopes': ['referenced_entry_scopes_value1', 'referenced_entry_scopes_value2']}, 'entry_sync_mode': 1, 'aspect_sync_mode': 1, 'log_level': 1}, 'export_spec': {'scope': {'organization_level': True, 'projects': ['projects_value1', 'projects_value2'], 'entry_groups': ['entry_groups_value1', 'entry_groups_value2'], 'entry_types': ['entry_types_value1', 'entry_types_value2'], 'aspect_types': ['aspect_types_value1', 'aspect_types_value2']}, 'output_path': 'output_path_value'}, 'import_result': {'deleted_entries': 1584, 'updated_entries': 1600, 'created_entries': 1585, 'unchanged_entries': 1798, 'recreated_entries': 1800, 'update_time': {}, 'deleted_entry_links': 2024, 'created_entry_links': 2025, 'unchanged_entry_links': 2238}, 'export_result': {'exported_entries': 1732, 'error_message': 'error_message_value'}, 'status': {'state': 1, 'message': 'message_value', 'completion_percent': 1930, 'update_time': {}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
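+ # proto-plus message classes expose their schema through `.meta.fields`, while vanilla protobuf classes carry a `DESCRIPTOR` attribute; the check below tells the two apart.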
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_job"][field])): + del request_init["metadata_job"][field][i][subfield] + else: + del request_init["metadata_job"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_metadata_job(request) + + # Establish that the response is the type that we expect. 
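+ # create_metadata_job is long-running, so the client wraps the Operation proto above in a google.api_core.operation.Operation future.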
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_metadata_job_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_metadata_job") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_metadata_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_metadata_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateMetadataJobRequest.pb(catalog.CreateMetadataJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.CreateMetadataJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_metadata_job(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetMetadataJobRequest, + dict, +]) +def test_get_metadata_job_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
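+ # Patching the transport's underlying HTTP session returns the faked response below without any network call.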
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob( + name='name_value', + uid='uid_value', + type_=catalog.MetadataJob.Type.IMPORT, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_metadata_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_metadata_job_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_metadata_job") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_metadata_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) + req.return_value.content = return_value + + request = catalog.GetMetadataJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.MetadataJob() + post_with_metadata.return_value = catalog.MetadataJob(), metadata + + client.get_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_metadata_jobs_rest_bad_request(request_type=catalog.ListMetadataJobsRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_metadata_jobs(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.ListMetadataJobsRequest, + dict, +]) +def test_list_metadata_jobs_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_metadata_jobs(request) + + # Establish that the response is the type that we expect. 
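+ # unreachable_locations surfaces any regions the service could not reach while listing metadata jobs.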
+ assert isinstance(response, pagers.ListMetadataJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_metadata_jobs_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListMetadataJobsRequest.pb(catalog.ListMetadataJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListMetadataJobsResponse.to_json(catalog.ListMetadataJobsResponse()) + req.return_value.content = return_value + + request = catalog.ListMetadataJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListMetadataJobsResponse() + post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata + + client.list_metadata_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_cancel_metadata_job_rest_bad_request(request_type=catalog.CancelMetadataJobRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
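+ # An empty JSON body is sufficient here; only the 400 status code matters for raising BadRequest.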
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_metadata_job(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.CancelMetadataJobRequest, + dict, +]) +def test_cancel_metadata_job_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_metadata_job(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_metadata_job_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_cancel_metadata_job") as pre: + pre.assert_not_called() + pb_message = catalog.CancelMetadataJobRequest.pb(catalog.CancelMetadataJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = catalog.CancelMetadataJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.cancel_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_entry_link_rest_bad_request(request_type=catalog.CreateEntryLinkRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_link(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.CreateEntryLinkRequest, + dict, +]) +def test_create_entry_link_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} + request_init["entry_link"] = {'name': 'name_value', 'entry_link_type': 'entry_link_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'entry_references': [{'name': 'name_value', 'path': 'path_value', 'type_': 2}]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryLinkRequest.meta.fields["entry_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated:
+ for i in range(0, len(request_init["entry_link"][field])): + del request_init["entry_link"][field][i][subfield] + else: + del request_init["entry_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry_link(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entry_link_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_link") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_link_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_link") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateEntryLinkRequest.pb(catalog.CreateEntryLinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) + req.return_value.content = return_value + + request = catalog.CreateEntryLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata + + client.create_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_link_rest_bad_request(request_type=catalog.DeleteEntryLinkRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry_link(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.DeleteEntryLinkRequest, + dict, +]) +def test_delete_entry_link_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry_link(request) + + # Establish that the response is the type that we expect.
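+ # Unlike most delete methods, DeleteEntryLink returns the deleted EntryLink resource instead of an empty message, so its fields can be asserted.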
+ assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_link_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_link") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_link_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_link") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryLinkRequest.pb(catalog.DeleteEntryLinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) + req.return_value.content = return_value + + request = catalog.DeleteEntryLinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata + + client.delete_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_link_rest_bad_request(request_type=catalog.GetEntryLinkRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_link(request) + + +@pytest.mark.parametrize("request_type", [ + catalog.GetEntryLinkRequest, + dict, +]) +def test_get_entry_link_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink( + name='name_value', + entry_link_type='entry_link_type_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_entry_link(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == 'name_value' + assert response.entry_link_type == 'entry_link_type_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entry_link_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_link") as post, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_link_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_link") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetEntryLinkRequest.pb(catalog.GetEntryLinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) + req.return_value.content = return_value + + request = catalog.GetEntryLinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata + + client.get_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
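+ # Mix-in requests such as GetLocationRequest are plain protobuf messages, hence the json_format.ParseDict construction above.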
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
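+ # Mix-in responses are plain protobuf messages with no fields populated on the fixture, so only the type is asserted.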
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
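+ # DeleteOperation has no response payload; the client surfaces success as None.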
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), + '__call__') as call: + client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), + '__call__') as call: + client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
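+ # [Editor's note -- illustrative: unlike the REST tests above, these
+ # "empty call" tests patch the transport method's __call__ directly, so no
+ # HTTP request is ever built. The recurring pattern is:
+ #
+ #     client.delete_entry_type(request=None)   # no request, no flattened args
+ #     _, args, _ = call.mock_calls[0]
+ #     assert args[0] == catalog.DeleteEntryTypeRequest()  # None -> default msg
+ # ]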
+ with mock.patch.object( + type(client.transport.delete_entry_type), + '__call__') as call: + client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_types_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_types), + '__call__') as call: + client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_type), + '__call__') as call: + client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_aspect_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), + '__call__') as call: + client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_aspect_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), + '__call__') as call: + client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_aspect_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_aspect_type), + '__call__') as call: + client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_aspect_types_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), + '__call__') as call: + client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_aspect_type_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_aspect_type), + '__call__') as call: + client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_group_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_group_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_group_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_groups_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_group_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + client.delete_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entries_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_entry_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_entries_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_entries), + '__call__') as call: + client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_metadata_job_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), + '__call__') as call: + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_job_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_job), + '__call__') as call: + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + '__call__') as call: + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), + '__call__') as call: + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), + '__call__') as call: + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), + '__call__') as call: + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_link), + '__call__') as call: + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_catalog_service_rest_lro_client(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CatalogServiceGrpcTransport, + ) + +def test_catalog_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CatalogServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_catalog_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CatalogServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
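+ # [Editor's note -- a sketch, assuming the usual GAPIC base-transport
+ # layout: each RPC on the base class is typically declared roughly as
+ #
+ #     @property
+ #     def create_entry_type(self):
+ #         raise NotImplementedError()
+ #
+ # so the loop below fails loudly for any RPC a concrete transport forgets
+ # to override.]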
+ methods = ( + 'create_entry_type', + 'update_entry_type', + 'delete_entry_type', + 'list_entry_types', + 'get_entry_type', + 'create_aspect_type', + 'update_aspect_type', + 'delete_aspect_type', + 'list_aspect_types', + 'get_aspect_type', + 'create_entry_group', + 'update_entry_group', + 'delete_entry_group', + 'list_entry_groups', + 'get_entry_group', + 'create_entry', + 'update_entry', + 'delete_entry', + 'list_entries', + 'get_entry', + 'lookup_entry', + 'search_entries', + 'create_metadata_job', + 'get_metadata_job', + 'list_metadata_jobs', + 'cancel_metadata_job', + 'create_entry_link', + 'delete_entry_link', + 'get_entry_link', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_catalog_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CatalogServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_catalog_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CatalogServiceTransport() + adc.assert_called_once() + + +def test_catalog_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CatalogServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + ], +) +def test_catalog_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
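+ # [Editor's note: google.auth.default() returns a (credentials, project_id)
+ # tuple, which is why every ADC mock in these tests returns a 2-tuple with
+ # the project slot set to None.]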
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + transports.CatalogServiceRestTransport, + ], +) +def test_catalog_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CatalogServiceGrpcTransport, grpc_helpers), + (transports.CatalogServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_catalog_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) +def test_catalog_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
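+ # [Editor's note -- illustrative: client_cert_source_for_mtls is a callable
+ # returning (cert_bytes, key_bytes); the transport is expected to feed that
+ # pair to gRPC, roughly:
+ #
+ #     cert, key = client_cert_source_callback()
+ #     grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
+ #
+ # which is exactly what the assertion below verifies.]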
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+def test_catalog_service_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+ transports.CatalogServiceRestTransport(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_catalog_service_host_no_port(transport_name):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:443'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://dataplex.googleapis.com'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_catalog_service_host_with_port(transport_name):
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'dataplex.googleapis.com:8000'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://dataplex.googleapis.com:8000'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "rest",
+])
+def test_catalog_service_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = CatalogServiceClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = CatalogServiceClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.create_entry_type._session
+ session2 = client2.transport.create_entry_type._session
+ assert session1 != session2
+ session1 = client1.transport.update_entry_type._session
+ session2 = client2.transport.update_entry_type._session
+ assert session1 != session2
+ session1 = client1.transport.delete_entry_type._session
+ session2 = client2.transport.delete_entry_type._session
+ assert session1 != session2
+ session1 = client1.transport.list_entry_types._session
+ session2 = client2.transport.list_entry_types._session
+ assert session1 != session2
+ session1 = client1.transport.get_entry_type._session
+ session2 = client2.transport.get_entry_type._session
+ assert session1 != session2
+ session1 = client1.transport.create_aspect_type._session
+ session2 = client2.transport.create_aspect_type._session
+ assert session1 != session2
+ session1 = client1.transport.update_aspect_type._session
+ session2 = client2.transport.update_aspect_type._session
+ assert session1 != session2
+ session1 = client1.transport.delete_aspect_type._session
+ session2 = client2.transport.delete_aspect_type._session
+ assert session1 != session2
+ session1 = client1.transport.list_aspect_types._session
+ session2 = client2.transport.list_aspect_types._session
+ assert session1 != session2
+ session1 = client1.transport.get_aspect_type._session
+ session2 = client2.transport.get_aspect_type._session
+ assert session1 != session2
+ session1 = client1.transport.create_entry_group._session
+ session2 = client2.transport.create_entry_group._session
+ assert session1 != session2
+ session1 = client1.transport.update_entry_group._session
+ session2 = client2.transport.update_entry_group._session
+ assert session1 != session2
+ session1 = client1.transport.delete_entry_group._session
+ session2 = client2.transport.delete_entry_group._session
+ assert session1 != session2
+ session1 = client1.transport.list_entry_groups._session
+ session2 = client2.transport.list_entry_groups._session
+ assert session1 != session2
+ session1 = client1.transport.get_entry_group._session
+ session2 = client2.transport.get_entry_group._session
+ assert session1 != session2
+ session1 = client1.transport.create_entry._session
+ session2 = client2.transport.create_entry._session
+ assert session1 != session2
+ session1 = client1.transport.update_entry._session
+ session2 = client2.transport.update_entry._session
+ assert session1 != session2
+ session1 = client1.transport.delete_entry._session
+ session2 = client2.transport.delete_entry._session
+ assert session1 != session2
+ session1 = client1.transport.list_entries._session
+ session2 = client2.transport.list_entries._session
+ assert session1 != session2
+ session1 = client1.transport.get_entry._session
+ session2 = client2.transport.get_entry._session
+ assert session1 != session2
+ session1 = client1.transport.lookup_entry._session
+ session2 = client2.transport.lookup_entry._session
+ assert session1 != session2
+ session1 = client1.transport.search_entries._session
+ session2 = client2.transport.search_entries._session
+ assert session1 != session2
+ session1 = client1.transport.create_metadata_job._session
+ session2 = client2.transport.create_metadata_job._session
+ assert session1 != session2
+ session1 = client1.transport.get_metadata_job._session
+ session2 = client2.transport.get_metadata_job._session
+ assert session1 != session2
+ session1 = client1.transport.list_metadata_jobs._session
+ session2 = client2.transport.list_metadata_jobs._session
+ assert session1 != session2
+ session1 = client1.transport.cancel_metadata_job._session
+ session2 = client2.transport.cancel_metadata_job._session
+ assert session1 != session2
+ session1 = client1.transport.create_entry_link._session
+ session2 = client2.transport.create_entry_link._session
+ assert session1 != session2
+ session1 = client1.transport.delete_entry_link._session
+ session2 = client2.transport.delete_entry_link._session
+ assert session1 != session2
+ session1 = client1.transport.get_entry_link._session
+ session2 = client2.transport.get_entry_link._session
+ assert session1 != session2
+
+def test_catalog_service_grpc_transport_channel():
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
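+ # [Editor's note: when a pre-built channel is passed in, the transport is
+ # expected to use it verbatim and skip credential/SSL setup entirely --
+ # hence the assertions below that grpc_channel is the provided object and
+ # _ssl_channel_credentials stays None.]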
+ transport = transports.CatalogServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_catalog_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.CatalogServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
+def test_catalog_service_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport])
+def test_catalog_service_transport_channel_mtls_with_adc(
+ transport_class
+):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_catalog_service_grpc_lro_client():
+ client = CatalogServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_catalog_service_grpc_lro_async_client():
+ client = CatalogServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc_asyncio',
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.OperationsAsyncClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_aspect_type_path():
+ project = "squid"
+ location = "clam"
+ aspect_type = "whelk"
+ expected = "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, )
+ actual = CatalogServiceClient.aspect_type_path(project, location, aspect_type)
+ assert expected == actual
+
+
+def test_parse_aspect_type_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "aspect_type": "nudibranch",
+ }
+ path = CatalogServiceClient.aspect_type_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_aspect_type_path(path)
+ assert expected == actual
+
+def test_entry_path():
+ project = "cuttlefish"
+ location = "mussel"
+ entry_group = "winkle"
+ entry = "nautilus"
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, )
+ actual = CatalogServiceClient.entry_path(project, location, entry_group, entry)
+ assert expected == actual
+
+
+def test_parse_entry_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ "entry_group": "squid",
+ "entry": "clam",
+ }
+ path = CatalogServiceClient.entry_path(**expected)
+
+ # Check that the path construction is reversible.
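+ # [Editor's note -- a sketch, assuming the usual generated implementation:
+ # parse_*_path helpers invert the template with a named-group regex, e.g.
+ #
+ #     m = re.match(
+ #         r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
+ #         r"/entryGroups/(?P<entry_group>.+?)/entries/(?P<entry>.+?)$", path)
+ #     m.groupdict() if m else {}
+ #
+ # which is what makes the round-trip asserted here possible.]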
+ actual = CatalogServiceClient.parse_entry_path(path) + assert expected == actual + +def test_entry_group_path(): + project = "whelk" + location = "octopus" + entry_group = "oyster" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + actual = CatalogServiceClient.entry_group_path(project, location, entry_group) + assert expected == actual + + +def test_parse_entry_group_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "entry_group": "mussel", + } + path = CatalogServiceClient.entry_group_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_entry_group_path(path) + assert expected == actual + +def test_entry_link_path(): + project = "winkle" + location = "nautilus" + entry_group = "scallop" + entry_link = "abalone" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format(project=project, location=location, entry_group=entry_group, entry_link=entry_link, ) + actual = CatalogServiceClient.entry_link_path(project, location, entry_group, entry_link) + assert expected == actual + + +def test_parse_entry_link_path(): + expected = { + "project": "squid", + "location": "clam", + "entry_group": "whelk", + "entry_link": "octopus", + } + path = CatalogServiceClient.entry_link_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_entry_link_path(path) + assert expected == actual + +def test_entry_type_path(): + project = "oyster" + location = "nudibranch" + entry_type = "cuttlefish" + expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) + actual = CatalogServiceClient.entry_type_path(project, location, entry_type) + assert expected == actual + + +def test_parse_entry_type_path(): + expected = { + "project": "mussel", + "location": "winkle", + "entry_type": "nautilus", + } + path = CatalogServiceClient.entry_type_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_entry_type_path(path) + assert expected == actual + +def test_glossary_path(): + project = "scallop" + location = "abalone" + glossary = "squid" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) + actual = CatalogServiceClient.glossary_path(project, location, glossary) + assert expected == actual + + +def test_parse_glossary_path(): + expected = { + "project": "clam", + "location": "whelk", + "glossary": "octopus", + } + path = CatalogServiceClient.glossary_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CatalogServiceClient.parse_glossary_path(path) + assert expected == actual + +def test_metadata_job_path(): + project = "oyster" + location = "nudibranch" + metadataJob = "cuttlefish" + expected = "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) + actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) + assert expected == actual + + +def test_parse_metadata_job_path(): + expected = { + "project": "mussel", + "location": "winkle", + "metadataJob": "nautilus", + } + path = CatalogServiceClient.metadata_job_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_metadata_job_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CatalogServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CatalogServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = CatalogServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CatalogServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CatalogServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CatalogServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = CatalogServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CatalogServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CatalogServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CatalogServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CatalogServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = CatalogServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
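+ # [Editor's note: the client builds an "x-goog-request-params" metadata
+ # entry from the request's name field -- here "name=locations" -- so the
+ # backend can route the call; the check below asserts that exact
+ # header/value pair was attached.]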
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
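+ # [Editor's note: grpc_helpers_async.FakeUnaryUnaryCall wraps a plain value
+ # in an awaitable that mimics a grpc.aio unary-unary call, which is what
+ # lets these async tests await the mocked transport method.]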
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the method under test (get_location), not list_locations.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CatalogServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the method under test (get_location), not list_locations.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CatalogServiceClient, transports.CatalogServiceGrpcTransport), + (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py new file mode 100644 index 000000000000..8e1bbd8a586b --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py @@ -0,0 +1,6432 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError:  # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async  # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataplex_v1.services.cmek_service import CmekServiceAsyncClient
+from google.cloud.dataplex_v1.services.cmek_service import CmekServiceClient
+from google.cloud.dataplex_v1.services.cmek_service import pagers
+from google.cloud.dataplex_v1.services.cmek_service import transports
+from google.cloud.dataplex_v1.types import cmek
+from google.cloud.dataplex_v1.types import service
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CmekServiceClient._get_default_mtls_endpoint(None) is None + assert CmekServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CmekServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CmekServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CmekServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CmekServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert CmekServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CmekServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CmekServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + CmekServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert CmekServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CmekServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CmekServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CmekServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert CmekServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CmekServiceClient._get_client_cert_source(None, False) is None + assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with 
mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert CmekServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) +@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = CmekServiceClient._DEFAULT_UNIVERSE + default_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert CmekServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT + assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "always") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT + assert CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT + assert CmekServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert CmekServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert CmekServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert CmekServiceClient._get_universe_domain(None, None) == CmekServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + CmekServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
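+
+# Illustrative sketch (not part of the generated surface): the precedence
+# exercised by test__get_universe_domain above is "explicit client setting
+# wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which
+# wins over the googleapis.com default". `_resolve_universe_domain_example`
+# is a hypothetical helper named here purely for demonstration.
+def _resolve_universe_domain_example(client_setting, env_setting,
+                                     default="googleapis.com"):
+    # An explicitly empty string is rejected, mirroring the tested error.
+    if client_setting == "":
+        raise ValueError("Universe Domain cannot be an empty string.")
+    return client_setting or env_setting or default
+
+assert _resolve_universe_domain_example("foo.com", "bar.com") == "foo.com"
+assert _resolve_universe_domain_example(None, "bar.com") == "bar.com"
+assert _resolve_universe_domain_example(None, None) == "googleapis.com"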
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CmekServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CmekServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (CmekServiceClient, "grpc"), + (CmekServiceAsyncClient, "grpc_asyncio"), + (CmekServiceClient, "rest"), +]) +def test_cmek_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CmekServiceGrpcTransport, "grpc"), + (transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CmekServiceRestTransport, "rest"), +]) +def test_cmek_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CmekServiceClient, "grpc"), + (CmekServiceAsyncClient, "grpc_asyncio"), + (CmekServiceClient, "rest"), +]) +def test_cmek_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + 
assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_cmek_service_client_get_transport_class(): + transport = CmekServiceClient.get_transport_class() + available_transports = [ + transports.CmekServiceGrpcTransport, + transports.CmekServiceRestTransport, + ] + assert transport in available_transports + + transport = CmekServiceClient.get_transport_class("grpc") + assert transport == transports.CmekServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc"), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (CmekServiceClient, transports.CmekServiceRestTransport, "rest"), +]) +@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) +@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) +def test_cmek_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CmekServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CmekServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", "true"), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", "false"), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (CmekServiceClient, transports.CmekServiceRestTransport, "rest", "true"), + (CmekServiceClient, transports.CmekServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) +@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) +@mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cmek_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CmekServiceClient, CmekServiceAsyncClient +]) +@mock.patch.object(CmekServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CmekServiceClient)) +@mock.patch.object(CmekServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CmekServiceAsyncClient)) +def test_cmek_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CmekServiceClient, CmekServiceAsyncClient +]) +@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) +@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) +def test_cmek_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CmekServiceClient._DEFAULT_UNIVERSE + default_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc"), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (CmekServiceClient, transports.CmekServiceRestTransport, "rest"), +]) +def test_cmek_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", grpc_helpers), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CmekServiceClient, transports.CmekServiceRestTransport, "rest", None), +]) +def test_cmek_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_cmek_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CmekServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", grpc_helpers), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_cmek_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    cmek.CreateEncryptionConfigRequest,
+    dict,
+])
+def test_create_encryption_config(request_type, transport: str = 'grpc'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = cmek.CreateEncryptionConfigRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_create_encryption_config_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = cmek.CreateEncryptionConfigRequest(
+        parent='parent_value',
+        encryption_config_id='encryption_config_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_encryption_config),
+        '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+ client.create_encryption_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cmek.CreateEncryptionConfigRequest( + parent='parent_value', + encryption_config_id='encryption_config_id_value', + ) + +def test_create_encryption_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_encryption_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_encryption_config] = mock_rpc + request = {} + client.create_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_encryption_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_encryption_config] = mock_rpc + + request = {} + await client.create_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.CreateEncryptionConfigRequest): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
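+    # request_type here is either the proto message class or dict (the
+    # *_from_dict test below passes dict); both yield an empty request, which
+    # is valid because every proto3 field has a default value.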
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cmek.CreateEncryptionConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_encryption_config_async_from_dict(): + await test_create_encryption_config_async(request_type=dict) + +def test_create_encryption_config_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.CreateEncryptionConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_encryption_config_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.CreateEncryptionConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_encryption_config_flattened(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
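+        # The client is expected to assemble these keyword arguments into a
+        # CreateEncryptionConfigRequest; the assertions below read each field
+        # back off args[0], the request object actually handed to the stub.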
+        client.create_encryption_config(
+            parent='parent_value',
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            encryption_config_id='encryption_config_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].encryption_config
+        mock_val = cmek.EncryptionConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].encryption_config_id
+        mock_val = 'encryption_config_id_value'
+        assert arg == mock_val
+
+
+def test_create_encryption_config_flattened_error():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_encryption_config(
+            cmek.CreateEncryptionConfigRequest(),
+            parent='parent_value',
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            encryption_config_id='encryption_config_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_encryption_config_flattened_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_encryption_config(
+            parent='parent_value',
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            encryption_config_id='encryption_config_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].encryption_config
+        mock_val = cmek.EncryptionConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].encryption_config_id
+        mock_val = 'encryption_config_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_encryption_config_flattened_error_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_encryption_config(
+            cmek.CreateEncryptionConfigRequest(),
+            parent='parent_value',
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            encryption_config_id='encryption_config_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    cmek.UpdateEncryptionConfigRequest,
+    dict,
+])
+def test_update_encryption_config(request_type, transport: str = 'grpc'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = cmek.UpdateEncryptionConfigRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_update_encryption_config_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = cmek.UpdateEncryptionConfigRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_encryption_config),
+        '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.update_encryption_config(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cmek.UpdateEncryptionConfigRequest()
+
+def test_update_encryption_config_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CmekServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_encryption_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_encryption_config] = mock_rpc
+        request = {}
+        client.update_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_encryption_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_encryption_config] = mock_rpc + + request = {} + await client.update_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.UpdateEncryptionConfigRequest): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cmek.UpdateEncryptionConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_encryption_config_async_from_dict(): + await test_update_encryption_config_async(request_type=dict) + +def test_update_encryption_config_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.UpdateEncryptionConfigRequest() + + request.encryption_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'encryption_config.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_encryption_config_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.UpdateEncryptionConfigRequest() + + request.encryption_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'encryption_config.name=name_value', + ) in kw['metadata'] + + +def test_update_encryption_config_flattened(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_encryption_config( + encryption_config=cmek.EncryptionConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].encryption_config + mock_val = cmek.EncryptionConfig(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_encryption_config_flattened_error(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_encryption_config( + cmek.UpdateEncryptionConfigRequest(), + encryption_config=cmek.EncryptionConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_encryption_config_flattened_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.update_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_encryption_config(
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].encryption_config
+        mock_val = cmek.EncryptionConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_encryption_config_flattened_error_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_encryption_config(
+            cmek.UpdateEncryptionConfigRequest(),
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    cmek.DeleteEncryptionConfigRequest,
+    dict,
+])
+def test_delete_encryption_config(request_type, transport: str = 'grpc'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = cmek.DeleteEncryptionConfigRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_delete_encryption_config_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = cmek.DeleteEncryptionConfigRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_encryption_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cmek.DeleteEncryptionConfigRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_encryption_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_encryption_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_encryption_config] = mock_rpc + request = {} + client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_encryption_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_encryption_config] = mock_rpc + + request = {} + await client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.DeleteEncryptionConfigRequest): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cmek.DeleteEncryptionConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_encryption_config_async_from_dict(): + await test_delete_encryption_config_async(request_type=dict) + +def test_delete_encryption_config_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.DeleteEncryptionConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_encryption_config_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cmek.DeleteEncryptionConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
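+        # The routing header is computed from request.name and travels in the
+        # call metadata as 'x-goog-request-params', which the backend uses to
+        # route the request.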
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'name=name_value',
+        ) in kw['metadata']
+
+
+def test_delete_encryption_config_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_encryption_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_encryption_config_flattened_error():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_encryption_config(
+            cmek.DeleteEncryptionConfigRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_encryption_config_flattened_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_encryption_config),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_encryption_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_encryption_config_flattened_error_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_encryption_config(
+            cmek.DeleteEncryptionConfigRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    cmek.ListEncryptionConfigsRequest,
+    dict,
+])
+def test_list_encryption_configs(request_type, transport: str = 'grpc'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
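+        # Only a single page is faked here; multi-page iteration is exercised
+        # by the *_pager and *_pages tests further down.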
+ call.return_value = cmek.ListEncryptionConfigsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_encryption_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cmek.ListEncryptionConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEncryptionConfigsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_encryption_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cmek.ListEncryptionConfigsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_encryption_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cmek.ListEncryptionConfigsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_encryption_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_encryption_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_encryption_configs] = mock_rpc + request = {} + client.list_encryption_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_encryption_configs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = CmekServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_encryption_configs in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_encryption_configs] = mock_rpc
+
+        request = {}
+        await client.list_encryption_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_encryption_configs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_async(transport: str = 'grpc_asyncio', request_type=cmek.ListEncryptionConfigsRequest):
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_encryption_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = cmek.ListEncryptionConfigsRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListEncryptionConfigsAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_async_from_dict():
+    await test_list_encryption_configs_async(request_type=dict)
+
+def test_list_encryption_configs_field_headers():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cmek.ListEncryptionConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        call.return_value = cmek.ListEncryptionConfigsResponse()
+        client.list_encryption_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_field_headers_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cmek.ListEncryptionConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse())
+        await client.list_encryption_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+def test_list_encryption_configs_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cmek.ListEncryptionConfigsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_encryption_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_encryption_configs_flattened_error():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_encryption_configs(
+            cmek.ListEncryptionConfigsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_flattened_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_encryption_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_encryption_configs_flattened_error_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_encryption_configs(
+            cmek.ListEncryptionConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_encryption_configs_pager(transport_name: str = "grpc"):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[],
+                next_page_token='def',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_encryption_configs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, cmek.EncryptionConfig)
+                   for i in results)
+
+
+def test_list_encryption_configs_pages(transport_name: str = "grpc"):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_encryption_configs),
+        '__call__') as call:
+        # Set the response to a series of pages.
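+        # mock's side_effect yields one element per underlying RPC: four pages
+        # (the last with an empty next_page_token), then a RuntimeError that
+        # would surface only if the pager fetched past the final page.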
+ call.side_effect = ( + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + next_page_token='abc', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[], + next_page_token='def', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + ], + next_page_token='ghi', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_encryption_configs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_encryption_configs_async_pager(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + next_page_token='abc', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[], + next_page_token='def', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + ], + next_page_token='ghi', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_encryption_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cmek.EncryptionConfig) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_encryption_configs_async_pages(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + next_page_token='abc', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[], + next_page_token='def', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + ], + next_page_token='ghi', + ), + cmek.ListEncryptionConfigsResponse( + encryption_configs=[ + cmek.EncryptionConfig(), + cmek.EncryptionConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_encryption_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cmek.GetEncryptionConfigRequest, + dict, +]) +def test_get_encryption_config(request_type, transport: str = 'grpc'): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cmek.EncryptionConfig( + name='name_value', + key='key_value', + encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, + etag='etag_value', + ) + response = client.get_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cmek.GetEncryptionConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cmek.EncryptionConfig) + assert response.name == 'name_value' + assert response.key == 'key_value' + assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING + assert response.etag == 'etag_value' + + +def test_get_encryption_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cmek.GetEncryptionConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_encryption_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_encryption_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cmek.GetEncryptionConfigRequest( + name='name_value', + ) + +def test_get_encryption_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_encryption_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_encryption_config] = mock_rpc + request = {} + client.get_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_encryption_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_encryption_config] = mock_rpc + + request = {} + await client.get_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.GetEncryptionConfigRequest): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
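+        # FakeUnaryUnaryCall wraps the message so that awaiting the mocked stub
+        # behaves like a real grpc.aio unary-unary call returning this response.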
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig(
+            name='name_value',
+            key='key_value',
+            encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING,
+            etag='etag_value',
+        ))
+        response = await client.get_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = cmek.GetEncryptionConfigRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cmek.EncryptionConfig)
+    assert response.name == 'name_value'
+    assert response.key == 'key_value'
+    assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING
+    assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_encryption_config_async_from_dict():
+    await test_get_encryption_config_async(request_type=dict)
+
+def test_get_encryption_config_field_headers():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cmek.GetEncryptionConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_encryption_config),
+            '__call__') as call:
+        call.return_value = cmek.EncryptionConfig()
+        client.get_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_encryption_config_field_headers_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cmek.GetEncryptionConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_encryption_config),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig())
+        await client.get_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_encryption_config_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_encryption_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cmek.EncryptionConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_encryption_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
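+        # The flattened `name` kwarg is folded into a GetEncryptionConfigRequest
+        # before the stub is invoked, so inspect the request the stub received.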
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_encryption_config_flattened_error():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_encryption_config(
+            cmek.GetEncryptionConfigRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_encryption_config_flattened_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_encryption_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_encryption_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_encryption_config_flattened_error_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_encryption_config(
+            cmek.GetEncryptionConfigRequest(),
+            name='name_value',
+        )
+
+
+def test_create_encryption_config_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CmekServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_encryption_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_encryption_config] = mock_rpc
+
+        request = {}
+        client.create_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_encryption_config_rest_required_fields(request_type=cmek.CreateEncryptionConfigRequest): + transport_class = transports.CmekServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["encryption_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "encryptionConfigId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_encryption_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "encryptionConfigId" in jsonified_request + assert jsonified_request["encryptionConfigId"] == request_init["encryption_config_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["encryptionConfigId"] = 'encryption_config_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_encryption_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("encryption_config_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "encryptionConfigId" in jsonified_request + assert jsonified_request["encryptionConfigId"] == 'encryption_config_id_value' + + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
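+            # request_type.pb() unwraps the proto-plus request into the raw
+            # protobuf message that json_format and transcode operate on.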
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_encryption_config(request)
+
+            expected_params = [
+                (
+                    "encryptionConfigId",
+                    "",
+                ),
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_encryption_config_rest_unset_required_fields():
+    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_encryption_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("encryptionConfigId", )) & set(("parent", "encryptionConfigId", "encryptionConfig", )))
+
+
+def test_create_encryption_config_rest_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'organizations/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            encryption_config_id='encryption_config_id_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.create_encryption_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/encryptionConfigs" % client.transport._host, args[1])
+
+
+def test_create_encryption_config_rest_flattened_error(transport: str = 'rest'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_encryption_config( + cmek.CreateEncryptionConfigRequest(), + parent='parent_value', + encryption_config=cmek.EncryptionConfig(name='name_value'), + encryption_config_id='encryption_config_id_value', + ) + + +def test_update_encryption_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_encryption_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_encryption_config] = mock_rpc + + request = {} + client.update_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_encryption_config_rest_required_fields(request_type=cmek.UpdateEncryptionConfigRequest): + transport_class = transports.CmekServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_encryption_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_encryption_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_encryption_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_encryption_config_rest_unset_required_fields():
+    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_encryption_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask", )) & set(("encryptionConfig", )))
+
+
+def test_update_encryption_config_rest_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            encryption_config=cmek.EncryptionConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.update_encryption_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{encryption_config.name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1])
+
+
+def test_update_encryption_config_rest_flattened_error(transport: str = 'rest'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
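+    # Mixing the two calling conventions is ambiguous, so the client raises
+    # client-side before any transport call is attempted.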
+ with pytest.raises(ValueError): + client.update_encryption_config( + cmek.UpdateEncryptionConfigRequest(), + encryption_config=cmek.EncryptionConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_encryption_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_encryption_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_encryption_config] = mock_rpc + + request = {} + client.delete_encryption_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_encryption_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_encryption_config_rest_required_fields(request_type=cmek.DeleteEncryptionConfigRequest): + transport_class = transports.CmekServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_encryption_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_encryption_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.delete_encryption_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_encryption_config_rest_unset_required_fields():
+    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_encryption_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
+
+
+def test_delete_encryption_config_rest_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.delete_encryption_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1])
+
+
+def test_delete_encryption_config_rest_flattened_error(transport: str = 'rest'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.delete_encryption_config( + cmek.DeleteEncryptionConfigRequest(), + name='name_value', + ) + + +def test_list_encryption_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_encryption_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_encryption_configs] = mock_rpc + + request = {} + client.list_encryption_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_encryption_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_encryption_configs_rest_required_fields(request_type=cmek.ListEncryptionConfigsRequest): + transport_class = transports.CmekServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_encryption_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_encryption_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cmek.ListEncryptionConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = cmek.ListEncryptionConfigsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_encryption_configs(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_encryption_configs_rest_unset_required_fields():
+    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_encryption_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_encryption_configs_rest_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = cmek.ListEncryptionConfigsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'organizations/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = cmek.ListEncryptionConfigsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_encryption_configs(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/encryptionConfigs" % client.transport._host, args[1])
+
+
+def test_list_encryption_configs_rest_flattened_error(transport: str = 'rest'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_encryption_configs(
+            cmek.ListEncryptionConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_encryption_configs_rest_pager(transport: str = 'rest'):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
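+        # Each mocked HTTP response below is one page; iteration stops when a
+        # page arrives with an empty next_page_token.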
+        # Set the response as a series of pages
+        response = (
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[],
+                next_page_token='def',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            cmek.ListEncryptionConfigsResponse(
+                encryption_configs=[
+                    cmek.EncryptionConfig(),
+                    cmek.EncryptionConfig(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(cmek.ListEncryptionConfigsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'organizations/sample1/locations/sample2'}
+
+        pager = client.list_encryption_configs(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, cmek.EncryptionConfig)
+                   for i in results)
+
+        pages = list(client.list_encryption_configs(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_encryption_config_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CmekServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_encryption_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_encryption_config] = mock_rpc
+
+        request = {}
+        client.get_encryption_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_encryption_config(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_encryption_config_rest_required_fields(request_type=cmek.GetEncryptionConfigRequest):
+    transport_class = transports.CmekServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_encryption_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_encryption_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = cmek.EncryptionConfig()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = cmek.EncryptionConfig.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_encryption_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_encryption_config_rest_unset_required_fields():
+    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_encryption_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_encryption_config_rest_flattened():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = cmek.EncryptionConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cmek.EncryptionConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_encryption_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1]) + + +def test_get_encryption_config_rest_flattened_error(transport: str = 'rest'): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_encryption_config( + cmek.GetEncryptionConfigRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CmekServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CmekServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CmekServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CmekServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
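+    # The client should adopt the supplied transport as-is rather than
+    # constructing a new one from credentials or options.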
+ transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CmekServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CmekServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CmekServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CmekServiceGrpcTransport, + transports.CmekServiceGrpcAsyncIOTransport, + transports.CmekServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = CmekServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_encryption_config_empty_call_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.CreateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_encryption_config_empty_call_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.UpdateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_encryption_config_empty_call_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
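+    # With request=None the client constructs a default, all-empty request
+    # message, which is what the stub should receive.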
+ with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.DeleteEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_encryption_configs_empty_call_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__') as call: + call.return_value = cmek.ListEncryptionConfigsResponse() + client.list_encryption_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.ListEncryptionConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_encryption_config_empty_call_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_encryption_config), + '__call__') as call: + call.return_value = cmek.EncryptionConfig() + client.get_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.GetEncryptionConfigRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CmekServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_encryption_config_empty_call_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.CreateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_encryption_config_empty_call_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.UpdateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_encryption_config_empty_call_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.DeleteEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_encryption_configs_empty_call_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_encryption_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.ListEncryptionConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_encryption_config_empty_call_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_encryption_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig( + name='name_value', + key='key_value', + encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, + etag='etag_value', + )) + await client.get_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.GetEncryptionConfigRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CmekServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_encryption_config_rest_bad_request(request_type=cmek.CreateEncryptionConfigRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_encryption_config(request) + + +@pytest.mark.parametrize("request_type", [ + cmek.CreateEncryptionConfigRequest, + dict, +]) +def test_create_encryption_config_rest_call_success(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request_init["encryption_config"] = {'name': 'name_value', 'key': 'key_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'encryption_state': 1, 'etag': 'etag_value', 'failure_details': {'error_code': 1, 'error_message': 'error_message_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cmek.CreateEncryptionConfigRequest.meta.fields["encryption_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
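+        # proto-plus messages expose nested fields via `.meta.fields`, while raw
+        # protobuf messages expose them via `.DESCRIPTOR.fields`; handle both.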
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["encryption_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["encryption_config"][field])): + del request_init["encryption_config"][field][i][subfield] + else: + del request_init["encryption_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_encryption_config(request) + + # Establish that the response is the type that we expect. 
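+    # create_encryption_config returns a long-running operation, so there is
+    # no typed response message to assert on beyond the Operation payload.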
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_encryption_config_rest_interceptors(null_interceptor):
+    transport = transports.CmekServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(),
+    )
+    client = CmekServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.CmekServiceRestInterceptor, "post_create_encryption_config") as post, \
+         mock.patch.object(transports.CmekServiceRestInterceptor, "post_create_encryption_config_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.CmekServiceRestInterceptor, "pre_create_encryption_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cmek.CreateEncryptionConfigRequest.pb(cmek.CreateEncryptionConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cmek.CreateEncryptionConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_encryption_config_rest_bad_request(request_type=cmek.UpdateEncryptionConfigRequest):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
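+    # A 400 status on the mocked session must surface to the caller as
+    # core_exceptions.BadRequest.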
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_encryption_config(request) + + +@pytest.mark.parametrize("request_type", [ + cmek.UpdateEncryptionConfigRequest, + dict, +]) +def test_update_encryption_config_rest_call_success(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}} + request_init["encryption_config"] = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3', 'key': 'key_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'encryption_state': 1, 'etag': 'etag_value', 'failure_details': {'error_code': 1, 'error_message': 'error_message_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cmek.UpdateEncryptionConfigRequest.meta.fields["encryption_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
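+        # A proto-plus message exposes its fields via `meta.fields`, while a
+        # raw protobuf message exposes them via `DESCRIPTOR.fields`; both
+        # branches below yield comparable per-field metadata.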
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["encryption_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["encryption_config"][field])): + del request_init["encryption_config"][field][i][subfield] + else: + del request_init["encryption_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_encryption_config(request) + + # Establish that the response is the type that we expect. 
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_encryption_config_rest_interceptors(null_interceptor):
+    transport = transports.CmekServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(),
+        )
+    client = CmekServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_update_encryption_config") as post, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_update_encryption_config_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "pre_update_encryption_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cmek.UpdateEncryptionConfigRequest.pb(cmek.UpdateEncryptionConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cmek.UpdateEncryptionConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_encryption_config_rest_bad_request(request_type=cmek.DeleteEncryptionConfigRequest):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_encryption_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    cmek.DeleteEncryptionConfigRequest,
+    dict,
+])
+def test_delete_encryption_config_rest_call_success(request_type):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_encryption_config(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_encryption_config_rest_interceptors(null_interceptor):
+    transport = transports.CmekServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(),
+        )
+    client = CmekServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_delete_encryption_config") as post, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_delete_encryption_config_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "pre_delete_encryption_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cmek.DeleteEncryptionConfigRequest.pb(cmek.DeleteEncryptionConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = cmek.DeleteEncryptionConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_encryption_configs_rest_bad_request(request_type=cmek.ListEncryptionConfigsRequest):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'organizations/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_encryption_configs(request) + + +@pytest.mark.parametrize("request_type", [ + cmek.ListEncryptionConfigsRequest, + dict, +]) +def test_list_encryption_configs_rest_call_success(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'organizations/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cmek.ListEncryptionConfigsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cmek.ListEncryptionConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_encryption_configs(request) + + # Establish that the response is the type that we expect. 
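+    # The client wraps the raw ListEncryptionConfigsResponse in a pager that
+    # exposes the page token and any unreachable locations.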
+    assert isinstance(response, pagers.ListEncryptionConfigsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_encryption_configs_rest_interceptors(null_interceptor):
+    transport = transports.CmekServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(),
+        )
+    client = CmekServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_list_encryption_configs") as post, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_list_encryption_configs_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "pre_list_encryption_configs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cmek.ListEncryptionConfigsRequest.pb(cmek.ListEncryptionConfigsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = cmek.ListEncryptionConfigsResponse.to_json(cmek.ListEncryptionConfigsResponse())
+        req.return_value.content = return_value
+
+        request = cmek.ListEncryptionConfigsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cmek.ListEncryptionConfigsResponse()
+        post_with_metadata.return_value = cmek.ListEncryptionConfigsResponse(), metadata
+
+        client.list_encryption_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_encryption_config_rest_bad_request(request_type=cmek.GetEncryptionConfigRequest):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_encryption_config(request) + + +@pytest.mark.parametrize("request_type", [ + cmek.GetEncryptionConfigRequest, + dict, +]) +def test_get_encryption_config_rest_call_success(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cmek.EncryptionConfig( + name='name_value', + key='key_value', + encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cmek.EncryptionConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_encryption_config(request) + + # Establish that the response is the type that we expect. 
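+    # Each field designated on the mocked EncryptionConfig should survive the
+    # JSON round-trip through the REST transport.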
+    assert isinstance(response, cmek.EncryptionConfig)
+    assert response.name == 'name_value'
+    assert response.key == 'key_value'
+    assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING
+    assert response.etag == 'etag_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_encryption_config_rest_interceptors(null_interceptor):
+    transport = transports.CmekServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(),
+        )
+    client = CmekServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_get_encryption_config") as post, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "post_get_encryption_config_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.CmekServiceRestInterceptor, "pre_get_encryption_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = cmek.GetEncryptionConfigRequest.pb(cmek.GetEncryptionConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = cmek.EncryptionConfig.to_json(cmek.EncryptionConfig())
+        req.return_value.content = return_value
+
+        request = cmek.GetEncryptionConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cmek.EncryptionConfig()
+        post_with_metadata.return_value = cmek.EncryptionConfig(), metadata
+
+        client.get_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_location(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    locations_pb2.GetLocationRequest,
+    dict,
+])
+def test_get_location_rest(request_type):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_encryption_config_empty_call_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_encryption_config), + '__call__') as call: + client.create_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.CreateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_encryption_config_empty_call_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_encryption_config), + '__call__') as call: + client.update_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.UpdateEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_encryption_config_empty_call_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_encryption_config), + '__call__') as call: + client.delete_encryption_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cmek.DeleteEncryptionConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_encryption_configs_empty_call_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_encryption_configs), + '__call__') as call: + client.list_encryption_configs(request=None) + + # Establish that the underlying stub method was called. 
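+        # With request=None the client constructs a default
+        # ListEncryptionConfigsRequest, which is what the stub should receive.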
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = cmek.ListEncryptionConfigsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_encryption_config_empty_call_rest():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_encryption_config),
+            '__call__') as call:
+        client.get_encryption_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = cmek.GetEncryptionConfigRequest()
+
+        assert args[0] == request_msg
+
+
+def test_cmek_service_rest_lro_client():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.CmekServiceGrpcTransport,
+    )
+
+def test_cmek_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.CmekServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_cmek_service_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.CmekServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
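+    # Each RPC and mixin method on the abstract base transport should raise
+    # NotImplementedError until a concrete gRPC or REST transport overrides it.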
+    methods = (
+        'create_encryption_config',
+        'update_encryption_config',
+        'delete_encryption_config',
+        'list_encryption_configs',
+        'get_encryption_config',
+        'get_location',
+        'list_locations',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_cmek_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.CmekServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_cmek_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.CmekServiceTransport()
+        adc.assert_called_once()
+
+
+def test_cmek_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        CmekServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CmekServiceGrpcTransport,
+        transports.CmekServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_cmek_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
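+    # google.auth.default() is patched so the test can assert that the
+    # transport forwards the caller's scopes and quota project when resolving
+    # Application Default Credentials.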
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CmekServiceGrpcTransport,
+        transports.CmekServiceGrpcAsyncIOTransport,
+        transports.CmekServiceRestTransport,
+    ],
+)
+def test_cmek_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.CmekServiceGrpcTransport, grpc_helpers),
+        (transports.CmekServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_cmek_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport])
+def test_cmek_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
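+    # client_cert_source_callback (the cert/key test helper used throughout
+    # this file) supplies the pair that grpc.ssl_channel_credentials should
+    # receive.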
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_cmek_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.CmekServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_cmek_service_host_no_port(transport_name):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_cmek_service_host_with_port(transport_name):
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_cmek_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = CmekServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = CmekServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_encryption_config._session
+    session2 = client2.transport.create_encryption_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_encryption_config._session
+    session2 = client2.transport.update_encryption_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_encryption_config._session
+    session2 = client2.transport.delete_encryption_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_encryption_configs._session
+    session2 = client2.transport.list_encryption_configs._session
+    assert session1 != session2
+    session1 = client1.transport.get_encryption_config._session
+    session2 = client2.transport.get_encryption_config._session
+    assert session1 != session2
+
+
+def test_cmek_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
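+    # A pre-built channel should be adopted as-is: the transport records the
+    # given channel and leaves its SSL channel credentials unset.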
+    transport = transports.CmekServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cmek_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CmekServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport])
+def test_cmek_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport]) +def test_cmek_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cmek_service_grpc_lro_client(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cmek_service_grpc_lro_async_client(): + client = CmekServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_encryption_config_path(): + organization = "squid" + location = "clam" + encryption_config = "whelk" + expected = "organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config}".format(organization=organization, location=location, encryption_config=encryption_config, ) + actual = CmekServiceClient.encryption_config_path(organization, location, encryption_config) + assert expected == actual + + +def test_parse_encryption_config_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "encryption_config": "nudibranch", + } + path = CmekServiceClient.encryption_config_path(**expected) + + # Check that the path construction is reversible. + actual = CmekServiceClient.parse_encryption_config_path(path) + assert expected == actual + +def test_organization_location_path(): + organization = "cuttlefish" + location = "mussel" + expected = "organizations/{organization}/locations/{location}".format(organization=organization, location=location, ) + actual = CmekServiceClient.organization_location_path(organization, location) + assert expected == actual + + +def test_parse_organization_location_path(): + expected = { + "organization": "winkle", + "location": "nautilus", + } + path = CmekServiceClient.organization_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CmekServiceClient.parse_organization_location_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CmekServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CmekServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CmekServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = CmekServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CmekServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CmekServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CmekServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CmekServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CmekServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = CmekServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CmekServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CmekServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CmekServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CmekServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CmekServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CmekServiceTransport, '_prep_wrapped_messages') as prep: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CmekServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = CmekServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
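+    # The request's resource name travels in the x-goog-request-params
+    # metadata entry, which the backend uses for request routing.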
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = CmekServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the stub actually under test (get_location, not list_locations),
+    # so the assertion below verifies the right call.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CmekServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # As in the sync variant, patch get_location rather than list_locations.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CmekServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CmekServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CmekServiceClient, transports.CmekServiceGrpcTransport), + (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py new file mode 100644 index 000000000000..10e4382404e0 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -0,0 +1,8115 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError:  # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataplex_v1.services.content_service import ContentServiceAsyncClient
+from google.cloud.dataplex_v1.services.content_service import ContentServiceClient
+from google.cloud.dataplex_v1.services.content_service import pagers
+from google.cloud.dataplex_v1.services.content_service import transports
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import content
+from google.cloud.dataplex_v1.types import content as gcd_content
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import expr_pb2  # type: ignore
+import google.auth
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    # Step by chunk_size so successive chunks do not overlap.
+    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ContentServiceClient._get_default_mtls_endpoint(None) is None + assert ContentServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ContentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ContentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ContentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ContentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert ContentServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ContentServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ContentServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + ContentServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ContentServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ContentServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ContentServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ContentServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ContentServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ContentServiceClient._get_client_cert_source(None, False) is None + assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, True) == 
mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert ContentServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ContentServiceClient._DEFAULT_UNIVERSE + default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert ContentServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT + assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT + assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT + assert ContentServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ContentServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ContentServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ContentServiceClient._get_universe_domain(None, None) == ContentServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ContentServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
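+
+# A minimal illustrative sketch of the precedence exercised just above: an
+# explicit client universe domain wins over the environment variable, which
+# wins over the default universe, and an empty string is rejected. The helper
+# below is hypothetical (it is not part of the generated client); the real
+# resolution lives in ContentServiceClient._get_universe_domain.
+def _universe_domain_precedence_sketch(client_domain, env_domain, default="googleapis.com"):
+    if client_domain == "":
+        # Mirrors the ValueError asserted in test__get_universe_domain.
+        raise ValueError("Universe Domain cannot be an empty string.")
+    return client_domain or env_domain or default
+
+def test__universe_domain_precedence_sketch():
+    assert _universe_domain_precedence_sketch("foo.com", "bar.com") == "foo.com"
+    assert _universe_domain_precedence_sketch(None, "bar.com") == "bar.com"
+    assert _universe_domain_precedence_sketch(None, None) == "googleapis.com"
+    with pytest.raises(ValueError):
+        _universe_domain_precedence_sketch("", None)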
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (ContentServiceClient, "grpc"), + (ContentServiceAsyncClient, "grpc_asyncio"), + (ContentServiceClient, "rest"), +]) +def test_content_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ContentServiceGrpcTransport, "grpc"), + (transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ContentServiceRestTransport, "rest"), +]) +def test_content_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ContentServiceClient, "grpc"), + (ContentServiceAsyncClient, "grpc_asyncio"), + (ContentServiceClient, "rest"), +]) +def test_content_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_content_service_client_get_transport_class(): + transport = ContentServiceClient.get_transport_class() + available_transports = [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceRestTransport, + ] + assert transport in available_transports + + transport = ContentServiceClient.get_transport_class("grpc") + assert transport == transports.ContentServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (ContentServiceClient, transports.ContentServiceRestTransport, "rest"), +]) +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) +def test_content_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "true"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "false"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ContentServiceClient, transports.ContentServiceRestTransport, "rest", "true"), + (ContentServiceClient, transports.ContentServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(ContentServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_content_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ContentServiceClient, ContentServiceAsyncClient +]) +@mock.patch.object(ContentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceAsyncClient)) +def test_content_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ContentServiceClient, ContentServiceAsyncClient +]) +@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) +@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) +def test_content_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ContentServiceClient._DEFAULT_UNIVERSE + default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (ContentServiceClient, transports.ContentServiceRestTransport, "rest"), +]) +def test_content_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ContentServiceClient, transports.ContentServiceRestTransport, "rest", None), +]) +def test_content_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_content_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = ContentServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_content_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
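+    # Note: both google.auth.load_credentials_from_file and google.auth.default are
+    # patched below, so the assertion can verify that the file-based credentials
+    # (not ADC) are the ones handed through to create_channel.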
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + gcd_content.CreateContentRequest, + dict, +]) +def test_create_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + data_text='data_text_value', + ) + response = client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_content.CreateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +def test_create_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_content.CreateContentRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.create_content(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == gcd_content.CreateContentRequest(
+            parent='parent_value',
+        )
+
+def test_create_content_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_content in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_content] = mock_rpc
+        request = {}
+        client.create_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_content in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_content] = mock_rpc
+
+        request = {}
+        await client.create_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.CreateContentRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+            name='name_value',
+            uid='uid_value',
+            path='path_value',
+            description='description_value',
+        ))
+        response = await client.create_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_content.CreateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_content_async_from_dict(): + await test_create_content_async(request_type=dict) + +def test_create_content_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_content.CreateContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value = analyze.Content() + client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_content.CreateContentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + await client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_content_flattened(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_content( + parent='parent_value', + content=analyze.Content(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
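+        # (The flattened keyword arguments are packed into a single
+        # CreateContentRequest by the client, which is why the individual
+        # fields are read back from args[0] here.)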
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].content
+        mock_val = analyze.Content(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_content(
+            gcd_content.CreateContentRequest(),
+            parent='parent_value',
+            content=analyze.Content(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_content(
+            parent='parent_value',
+            content=analyze.Content(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].content
+        mock_val = analyze.Content(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_content(
+            gcd_content.CreateContentRequest(),
+            parent='parent_value',
+            content=analyze.Content(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    gcd_content.UpdateContentRequest,
+    dict,
+])
+def test_update_content(request_type, transport: str = 'grpc'):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyze.Content(
+            name='name_value',
+            uid='uid_value',
+            path='path_value',
+            description='description_value',
+            data_text='data_text_value',
+        )
+        response = client.update_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = gcd_content.UpdateContentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +def test_update_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_content.UpdateContentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.UpdateContentRequest( + ) + +def test_update_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_content] = mock_rpc + request = {} + client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_content] = mock_rpc + + request = {} + await client.update_content(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.update_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.UpdateContentRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+            name='name_value',
+            uid='uid_value',
+            path='path_value',
+            description='description_value',
+        ))
+        response = await client.update_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = gcd_content.UpdateContentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analyze.Content)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.path == 'path_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_update_content_async_from_dict():
+    await test_update_content_async(request_type=dict)
+
+def test_update_content_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gcd_content.UpdateContentRequest()
+
+    request.content.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        call.return_value = analyze.Content()
+        client.update_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'content.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_content_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gcd_content.UpdateContentRequest()
+
+    request.content.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+        await client.update_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'content.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_content_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyze.Content()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_content(
+            content=analyze.Content(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].content
+        mock_val = analyze.Content(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_content(
+            gcd_content.UpdateContentRequest(),
+            content=analyze.Content(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_content(
+            content=analyze.Content(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].content
+        mock_val = analyze.Content(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_content( + gcd_content.UpdateContentRequest(), + content=analyze.Content(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + content.DeleteContentRequest, + dict, +]) +def test_delete_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.DeleteContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.DeleteContentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.DeleteContentRequest( + name='name_value', + ) + +def test_delete_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc + request = {} + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_content] = mock_rpc + + request = {} + await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_content_async(transport: str = 'grpc_asyncio', request_type=content.DeleteContentRequest): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = content.DeleteContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_content_async_from_dict(): + await test_delete_content_async(request_type=dict) + +def test_delete_content_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.DeleteContentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value = None + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_content_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.DeleteContentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_content),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_content_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_content(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_content(
+            content.DeleteContentRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_content(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_content( + content.DeleteContentRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + content.GetContentRequest, + dict, +]) +def test_get_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + data_text='data_text_value', + ) + response = client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.GetContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +def test_get_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.GetContentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.GetContentRequest( + name='name_value', + ) + +def test_get_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_content] = mock_rpc + request = {} + client.get_content(request) + + # Establish that the underlying gRPC stub method was called. 
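+        # (wrapper_fn was reset after client construction, so it must remain at
+        # zero through the second call below to show the cached wrapper is
+        # reused rather than re-created.)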
+        assert mock_rpc.call_count == 1
+
+        client.get_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_content in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_content] = mock_rpc
+
+        request = {}
+        await client.get_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_content_async(transport: str = 'grpc_asyncio', request_type=content.GetContentRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content(
+            name='name_value',
+            uid='uid_value',
+            path='path_value',
+            description='description_value',
+        ))
+        response = await client.get_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = content.GetContentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analyze.Content)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.path == 'path_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_get_content_async_from_dict():
+    await test_get_content_async(request_type=dict)
+
+def test_get_content_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.GetContentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_content),
+            '__call__') as call:
+        call.return_value = analyze.Content()
+        client.get_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_content_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.GetContentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_content),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+        await client.get_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_content_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyze.Content()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_content(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_content(
+            content.GetContentRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_content(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_content( + content.GetContentRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
+
+        request = {}
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+    await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.get_iam_policy(request={
+            'resource': 'resource_value',
+            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.set_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+        request = {}
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+        request = {}
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
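+    # (Note on the patch target used throughout these tests: the transport
+    # attribute is a cached gRPC multicallable *instance*, and Python looks up
+    # dunder methods such as __call__ on the type, not the instance. Patching
+    # '__call__' on type(client.transport.<method>) is therefore what actually
+    # intercepts invocations; patching the instance attribute would not.)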
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = ContentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
+
+        request = {}
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + content.ListContentRequest, + dict, +]) +def test_list_content(request_type, transport: str = 'grpc'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = content.ListContentResponse( + next_page_token='next_page_token_value', + ) + response = client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = content.ListContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListContentPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
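+    # (Per AIP-4235, request fields annotated for auto-population, typically
+    # a request_id, are filled with a UUID4 by the client only when the caller
+    # leaves them unset; explicitly set fields must pass through untouched.
+    # A rough sketch of the mechanism, not the exact generated code:
+    #
+    #     if not request.request_id:
+    #         request.request_id = str(uuid.uuid4())
+    #
+    # ListContentRequest declares no such field, so the equality assertion
+    # below verifies the request passes through exactly as constructed.)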
+ client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = content.ListContentRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.ListContentRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_content] = mock_rpc + request = {} + client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_content in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_content] = mock_rpc + + request = {} + await client.list_content(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_content_async(transport: str = 'grpc_asyncio', request_type=content.ListContentRequest):
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = content.ListContentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListContentAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_from_dict():
+    await test_list_content_async(request_type=dict)
+
+def test_list_content_field_headers():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.ListContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        call.return_value = content.ListContentResponse()
+        client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_content_field_headers_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = content.ListContentRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
+        await client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_content_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = content.ListContentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_content(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_content_flattened_error():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_content(
+            content.ListContentRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_content(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_content(
+            content.ListContentRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_content_pager(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Set the response to a series of pages.
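+        # (Each entry in the side_effect tuple below is returned by one
+        # successive stub invocation, so the pager sees three non-empty pages
+        # and one empty one; the trailing RuntimeError is a sentinel that
+        # mock raises, failing the test, if the pager over-fetches past the
+        # page whose next_page_token is empty.)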
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_content(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Content)
+                   for i in results)
+
+def test_list_content_pages(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_content(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_content_async_pager():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_content(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyze.Content)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_pages():
+    client = ContentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_content),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + content.ListContentResponse( + content=[ + analyze.Content(), + analyze.Content(), + analyze.Content(), + ], + next_page_token='abc', + ), + content.ListContentResponse( + content=[], + next_page_token='def', + ), + content.ListContentResponse( + content=[ + analyze.Content(), + ], + next_page_token='ghi', + ), + content.ListContentResponse( + content=[ + analyze.Content(), + analyze.Content(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_content(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_content] = mock_rpc + + request = {} + client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_content_rest_required_fields(request_type=gcd_content.CreateContentRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_content._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
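+    # (Unlike the gRPC tests above, the REST tests that follow never touch a
+    # stub: they hand-build a requests.Response with status 200 and a
+    # JSON-serialized proto body, so only the transport's request/response
+    # plumbing is exercised, not any real HTTP traffic.)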
+ return_value = analyze.Content() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_content_rest_unset_required_fields(): + transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "content", ))) + + +def test_create_content_rest_flattened(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Content() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + content=analyze.Content(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/contentitems" % client.transport._host, args[1]) + + +def test_create_content_rest_flattened_error(transport: str = 'rest'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
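+    # (The generated surface treats the request object and the flattened
+    # keyword arguments as mutually exclusive ways of populating the same
+    # message, so passing both raises ValueError before any HTTP call is
+    # attempted.)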
+ with pytest.raises(ValueError): + client.create_content( + gcd_content.CreateContentRequest(), + parent='parent_value', + content=analyze.Content(name='name_value'), + ) + + +def test_update_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_content] = mock_rpc + + request = {} + client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_content_rest_required_fields(request_type=gcd_content.UpdateContentRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_content._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analyze.Content() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
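+            # (For reference: the real path_template.transcode() matches the
+            # request against the method's HTTP rule and splits it into a dict
+            # of the shape {'uri': ..., 'method': ..., 'query_params': ...,
+            # 'body': ...}; stubbing it keeps these tests independent of the
+            # concrete URI templates.)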
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_content_rest_unset_required_fields(): + transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "content", ))) + + +def test_update_content_rest_flattened(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Content() + + # get arguments that satisfy an http rule for this method + sample_request = {'content': {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + content=analyze.Content(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{content.name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1]) + + +def test_update_content_rest_flattened_error(transport: str = 'rest'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_content( + gcd_content.UpdateContentRequest(), + content=analyze.Content(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc + + request = {} + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_content_rest_required_fields(request_type=content.DeleteContentRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
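+            # (delete_content maps to HTTP DELETE and returns an empty
+            # message, which is why the faked response body below is the
+            # empty string rather than a serialized proto.)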
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_content_rest_unset_required_fields(): + transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_content_rest_flattened(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1]) + + +def test_delete_content_rest_flattened_error(transport: str = 'rest'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_content( + content.DeleteContentRequest(), + name='name_value', + ) + + +def test_get_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.get_content] = mock_rpc + + request = {} + client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_content_rest_required_fields(request_type=content.GetContentRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_content._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analyze.Content() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_content(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_content_rest_unset_required_fields(): + transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view", )) & set(("name", ))) + + +def test_get_content_rest_flattened(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Content() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1]) + + +def test_get_content_rest_flattened_error(transport: str = 'rest'): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_content( + content.GetContentRequest(), + name='name_value', + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
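+            # Unlike the proto-plus Dataplex requests above, which need a
+            # request_type.pb(request) conversion, iam_policy_pb2 requests are
+            # already plain protobuf messages, so the request is used as-is.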
+            pb_request = request
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_iam_policy(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_iam_policy_rest_unset_required_fields():
+    transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("options", )) & set(("resource", )))
+
+
+def test_get_iam_policy_rest_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            resource='resource_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_iam_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:getIamPolicy" % client.transport._host, args[1])
+
+
+def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
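+            # SetIamPolicy transcodes to an http POST, so the stubbed transcode
+            # result below also carries a 'body' entry; the GET-mapped methods
+            # above had query params only.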
+            pb_request = request
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.set_iam_policy(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+    transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource", "policy", )))
+
+
+def test_test_iam_permissions_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.test_iam_permissions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc
+
+        request = {}
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
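+        # The cache is in effect a dict keyed by the bound transport method:
+        # wrap_method() only runs inside _prep_wrapped_messages() at client
+        # construction, so wrapper_fn must stay at zero calls below no matter
+        # how many times the RPC itself is invoked.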
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): + transport_class = transports.ContentServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + jsonified_request["permissions"] = 'permissions_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == 'permissions_value' + + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.test_iam_permissions(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_test_iam_permissions_rest_unset_required_fields():
+    transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource", "permissions", )))
+
+
+def test_list_content_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_content in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_content] = mock_rpc
+
+        request = {}
+        client.list_content(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_content(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_content_rest_required_fields(request_type=content.ListContentRequest):
+    transport_class = transports.ContentServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_content._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_content._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = content.ListContentResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = content.ListContentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_content(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_content_rest_unset_required_fields():
+    transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_content._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_content_rest_flattened():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = content.ListContentResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = content.ListContentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_content(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
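+        # path_template.validate() checks the URL actually requested (args[1])
+        # against the http rule's path template, here
+        # '/v1/{parent=projects/*/locations/*/lakes/*}/contentitems' expanded
+        # with the sample parent.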
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/contentitems" % client.transport._host, args[1])
+
+
+def test_list_content_rest_flattened_error(transport: str = 'rest'):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_content(
+            content.ListContentRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_content_rest_pager(transport: str = 'rest'):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+                next_page_token='abc',
+            ),
+            content.ListContentResponse(
+                content=[],
+                next_page_token='def',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                ],
+                next_page_token='ghi',
+            ),
+            content.ListContentResponse(
+                content=[
+                    analyze.Content(),
+                    analyze.Content(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(content.ListContentResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+
+        pager = client.list_content(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Content)
+                   for i in results)
+
+        pages = list(client.list_content(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.ContentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ContentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.ContentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ContentServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.ContentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = ContentServiceClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ContentServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ContentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + transports.ContentServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = ContentServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + call.return_value = analyze.Content() + client.create_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.CreateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + call.return_value = analyze.Content() + client.update_content(request=None) + + # Establish that the underlying stub method was called. 
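+        # With request=None the GAPIC surface builds a default
+        # gcd_content.UpdateContentRequest(), which is what args[0] is compared
+        # against below.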
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.UpdateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + call.return_value = None + client.delete_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.DeleteContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + call.return_value = analyze.Content() + client.get_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.GetContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_content_empty_call_grpc(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + call.return_value = content.ListContentResponse() + client.list_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.ListContentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ContentServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.create_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.CreateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.update_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.UpdateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.DeleteContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + )) + await client.get_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.GetContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_content_empty_call_grpc_asyncio(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse( + next_page_token='next_page_token_value', + )) + await client.list_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.ListContentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ContentServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_content_rest_bad_request(request_type=gcd_content.CreateContentRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
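+    # The REST transport maps HTTP status codes onto google.api_core
+    # exceptions (a 400 surfaces as core_exceptions.BadRequest), which is what
+    # the mocked response below exercises.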
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_content(request) + + +@pytest.mark.parametrize("request_type", [ + gcd_content.CreateContentRequest, + dict, +]) +def test_create_content_rest_call_success(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request_init["content"] = {'name': 'name_value', 'uid': 'uid_value', 'path': 'path_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'data_text': 'data_text_value', 'sql_script': {'engine': 2}, 'notebook': {'kernel_type': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcd_content.CreateContentRequest.meta.fields["content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["content"][field])): + del request_init["content"][field][i][subfield] + else: + del request_init["content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + data_text='data_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_content_rest_interceptors(null_interceptor): + transport = transports.ContentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), + ) + client = ContentServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "post_create_content") as post, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "post_create_content_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "pre_create_content") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcd_content.CreateContentRequest.pb(gcd_content.CreateContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = analyze.Content.to_json(analyze.Content()) + req.return_value.content = return_value + + request = gcd_content.CreateContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analyze.Content() + post_with_metadata.return_value = analyze.Content(), metadata + + client.create_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_content_rest_bad_request(request_type=gcd_content.UpdateContentRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'content': {'name': 
'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_content(request) + + +@pytest.mark.parametrize("request_type", [ + gcd_content.UpdateContentRequest, + dict, +]) +def test_update_content_rest_call_success(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'content': {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}} + request_init["content"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4', 'uid': 'uid_value', 'path': 'path_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'data_text': 'data_text_value', 'sql_script': {'engine': 2}, 'notebook': {'kernel_type': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcd_content.UpdateContentRequest.meta.fields["content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["content"][field])): + del request_init["content"][field][i][subfield] + else: + del request_init["content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Content( + name='name_value', + uid='uid_value', + path='path_value', + description='description_value', + data_text='data_text_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Content.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analyze.Content) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.path == 'path_value' + assert response.description == 'description_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_content_rest_interceptors(null_interceptor): + transport = transports.ContentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), + ) + client = ContentServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "post_update_content") as post, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "post_update_content_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "pre_update_content") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcd_content.UpdateContentRequest.pb(gcd_content.UpdateContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = analyze.Content.to_json(analyze.Content()) + req.return_value.content = return_value + + request = gcd_content.UpdateContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analyze.Content() + post_with_metadata.return_value = analyze.Content(), metadata + + client.update_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_content_rest_bad_request(request_type=content.DeleteContentRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_content(request) + + +@pytest.mark.parametrize("request_type", [ + content.DeleteContentRequest, + dict, +]) +def test_delete_content_rest_call_success(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
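+    # DeleteContent returns google.protobuf.Empty, which the client surfaces
+    # as None; hence the empty JSON body here and the `assert response is None`
+    # below.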
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_content(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_content_rest_interceptors(null_interceptor): + transport = transports.ContentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), + ) + client = ContentServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ContentServiceRestInterceptor, "pre_delete_content") as pre: + pre.assert_not_called() + pb_message = content.DeleteContentRequest.pb(content.DeleteContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = content.DeleteContentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_content_rest_bad_request(request_type=content.GetContentRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_content(request) + + +@pytest.mark.parametrize("request_type", [ + content.GetContentRequest, + dict, +]) +def test_get_content_rest_call_success(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        return_value = analyze.Content(
+            name='name_value',
+            uid='uid_value',
+            path='path_value',
+            description='description_value',
+            data_text='data_text_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = analyze.Content.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_content(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analyze.Content)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.path == 'path_value'
+    assert response.description == 'description_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_content_rest_interceptors(null_interceptor):
+    transport = transports.ContentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(),
+    )
+    client = ContentServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_content") as post, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_content_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "pre_get_content") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = content.GetContentRequest.pb(content.GetContentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = analyze.Content.to_json(analyze.Content())
+        req.return_value.content = return_value
+
+        request = content.GetContentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = analyze.Content()
+        post_with_metadata.return_value = analyze.Content(), metadata
+
+        client.get_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
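+    # A 400 status on the mocked session is mapped by the REST transport to
+    # core_exceptions.BadRequest before any response payload is parsed.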
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_iam_policy(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.GetIamPolicyRequest,
+    dict,
+])
+def test_get_iam_policy_rest_call_success(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_iam_policy_rest_interceptors(null_interceptor):
+    transport = transports.ContentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(),
+    )
+    client = ContentServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_iam_policy") as post, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "pre_get_iam_policy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = iam_policy_pb2.GetIamPolicyRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(policy_pb2.Policy())
+        req.return_value.content = return_value
+
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = policy_pb2.Policy()
+        post_with_metadata.return_value = policy_pb2.Policy(), metadata
+
+        client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.set_iam_policy(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.SetIamPolicyRequest,
+    dict,
+])
+def test_set_iam_policy_rest_call_success(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.set_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+    transport = transports.ContentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(),
+    )
+    client = ContentServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_set_iam_policy") as post, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "pre_set_iam_policy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = iam_policy_pb2.SetIamPolicyRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(policy_pb2.Policy())
+        req.return_value.content = return_value
+
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = policy_pb2.Policy()
+        post_with_metadata.return_value = policy_pb2.Policy(), metadata
+
+        client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.test_iam_permissions(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.TestIamPermissionsRequest,
+    dict,
+])
+def test_test_iam_permissions_rest_call_success(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
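+    # iam_policy_pb2 messages are plain protobufs rather than proto-plus types,
+    # so they serialize via json_format directly, with no .pb() conversion step
+    # (contrast the ListContentResponse.pb(...) call in the list_content test below).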
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.test_iam_permissions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_test_iam_permissions_rest_interceptors(null_interceptor):
+    transport = transports.ContentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(),
+    )
+    client = ContentServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_test_iam_permissions") as post, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "pre_test_iam_permissions") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = iam_policy_pb2.TestIamPermissionsRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse())
+        req.return_value.content = return_value
+
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata
+
+        client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_content_rest_bad_request(request_type=content.ListContentRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_content(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    content.ListContentRequest,
+    dict,
+])
+def test_list_content_rest_call_success(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = content.ListContentResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = content.ListContentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_content(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListContentPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_content_rest_interceptors(null_interceptor):
+    transport = transports.ContentServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(),
+    )
+    client = ContentServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_list_content") as post, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "post_list_content_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.ContentServiceRestInterceptor, "pre_list_content") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = content.ListContentRequest.pb(content.ListContentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = content.ListContentResponse.to_json(content.ListContentResponse())
+        req.return_value.content = return_value
+
+        request = content.ListContentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = content.ListContentResponse()
+        post_with_metadata.return_value = content.ListContentResponse(), metadata
+
+        client.list_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_location(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    locations_pb2.GetLocationRequest,
+    dict,
+])
+def test_get_location_rest(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = locations_pb2.Location()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        response = client.get_location(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+
+def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_locations(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    locations_pb2.ListLocationsRequest,
+    dict,
+])
+def test_list_locations_rest(request_type):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
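+        # The locations/operations mixin tests patch requests.Session directly,
+        # unlike the service-method tests above, which patch the transport's session.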
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_content_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_content), + '__call__') as call: + client.create_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.CreateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_content_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_content), + '__call__') as call: + client.update_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_content.UpdateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_content_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_content), + '__call__') as call: + client.delete_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.DeleteContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_content_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_content), + '__call__') as call: + client.get_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.GetContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_content_empty_call_rest(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_content), + '__call__') as call: + client.list_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = content.ListContentRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ContentServiceGrpcTransport, + ) + +def test_content_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_content_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_content', + 'update_content', + 'delete_content', + 'get_content', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'list_content', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_content_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_content_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
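+    # google.auth.default is patched here, so no real ADC lookup is performed.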
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport() + adc.assert_called_once() + + +def test_content_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ContentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + transports.ContentServiceRestTransport, + ], +) +def test_content_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ContentServiceGrpcTransport, grpc_helpers), + (transports.ContentServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_content_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport])
+def test_content_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_content_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.ContentServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_content_service_host_no_port(transport_name):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_content_service_host_with_port(transport_name):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com:8000'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_content_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = ContentServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = ContentServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_content._session
+    session2 = client2.transport.create_content._session
+    assert session1 != session2
+    session1 = client1.transport.update_content._session
+    session2 = client2.transport.update_content._session
+    assert session1 != session2
+    session1 = client1.transport.delete_content._session
+    session2 = client2.transport.delete_content._session
+    assert session1 != session2
+    session1 = client1.transport.get_content._session
+    session2 = client2.transport.get_content._session
+    assert session1 != session2
+    session1 = client1.transport.get_iam_policy._session
+    session2 = client2.transport.get_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.list_content._session
+    session2 = client2.transport.list_content._session
+    assert session1 != session2
+
+
+def test_content_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_content_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
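+# client_cert_source_callback is defined earlier in this file and returns the
+# (b"cert bytes", b"key bytes") pair asserted on below.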
+@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) +def test_content_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) +def test_content_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_content_path(): + project = "squid" + location = "clam" + lake = "whelk" + content = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) + actual = ContentServiceClient.content_path(project, location, lake, content) + assert expected == actual + + +def test_parse_content_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "content": "mussel", + } + path = ContentServiceClient.content_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_content_path(path) + assert expected == actual + +def test_lake_path(): + project = "winkle" + location = "nautilus" + lake = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + actual = ContentServiceClient.lake_path(project, location, lake) + assert expected == actual + + +def test_parse_lake_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + } + path = ContentServiceClient.lake_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_lake_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ContentServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ContentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = ContentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ContentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ContentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ContentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = ContentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ContentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ContentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ContentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = ContentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
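+ # (FakeUnaryUnaryCall wraps the value in an awaitable, grpc.aio-style
+ # call object, so the async client can await the mocked stub exactly
+ # as it would a real unary-unary RPC.)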
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
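+ # (In proto3, unset fields simply take their default values, so an
+ # empty request message is still a valid payload on the wire.)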
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ContentServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = locations_pb2.Location()
+
+ client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials()
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = locations_pb2.GetLocationRequest()
+ request.name = "locations/abc"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ await client.get_location(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ # (Fixed: mock the get_location stub, which is the method actually
+ # invoked below; mocking list_locations here would leave the real
+ # get_location stub live.)
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location()
+
+ response = client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ locations_pb2.Location()
+ )
+ response = await client.get_location(
+ request={
+ "name": "locations/abc",
+ }
+ )
+ call.assert_called()
+
+
+def test_transport_close_grpc():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc"
+ )
+ with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = ContentServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio"
+ )
+ with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close_rest():
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ 'rest',
+ 'grpc',
+ ]
+ for transport in transports:
+ client = ContentServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+ (ContentServiceClient, transports.ContentServiceGrpcTransport),
+ (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+ with mock.patch.object(
+ google.auth._default, "get_api_key_credentials", create=True
+ ) as get_api_key_credentials:
+ mock_cred = mock.Mock()
+ get_api_key_credentials.return_value = mock_cred
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=mock_cred,
+ credentials_file=None,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py
new file mode 100644
index 000000000000..8b86e6fafa7e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py
@@ -0,0 +1,9435 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceAsyncClient
+from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceClient
+from google.cloud.dataplex_v1.services.data_scan_service import pagers
+from google.cloud.dataplex_v1.services.data_scan_service import transports
+from google.cloud.dataplex_v1.types import data_discovery
+from google.cloud.dataplex_v1.types import data_profile
+from google.cloud.dataplex_v1.types import data_quality
+from google.cloud.dataplex_v1.types import datascans
+from google.cloud.dataplex_v1.types import datascans_common
+from google.cloud.dataplex_v1.types import processing
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import options_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+import google.auth
+
+
+
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataScanServiceClient._get_default_mtls_endpoint(None) is None + assert DataScanServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataScanServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataScanServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataScanServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataScanServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataScanServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataScanServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataScanServiceClient._read_environment_variables() == (False, "auto", 
"foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataScanServiceClient._get_client_cert_source(None, False) is None + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DataScanServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataScanServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataScanServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataScanServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataScanServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataScanServiceClient._get_universe_domain(None, None) == DataScanServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataScanServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataScanServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataScanServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataScanServiceClient, "grpc"), + (DataScanServiceAsyncClient, "grpc_asyncio"), + (DataScanServiceClient, "rest"), +]) +def test_data_scan_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataScanServiceGrpcTransport, "grpc"), + (transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataScanServiceRestTransport, "rest"), +]) +def test_data_scan_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataScanServiceClient, "grpc"), + (DataScanServiceAsyncClient, "grpc_asyncio"), + (DataScanServiceClient, "rest"), +]) +def test_data_scan_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_data_scan_service_client_get_transport_class(): + transport = DataScanServiceClient.get_transport_class() + available_transports = [ + transports.DataScanServiceGrpcTransport, + transports.DataScanServiceRestTransport, + ] + assert transport in available_transports + + transport = DataScanServiceClient.get_transport_class("grpc") + assert transport == transports.DataScanServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest"), +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test_data_scan_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
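+ # (With "always", the client should target DEFAULT_MTLS_ENDPOINT even
+ # though no client certificate has been configured.)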
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "true"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "false"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", "true"), + (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(DataScanServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_scan_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
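+ # (With no certificate available from either source, the plain
+ # endpoint should be used regardless of the
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE setting.)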
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataScanServiceClient, DataScanServiceAsyncClient +]) +@mock.patch.object(DataScanServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceAsyncClient)) +def test_data_scan_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
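+ # (Under "auto", the presence of a default client certificate is what
+ # switches the client to the mTLS endpoint, as asserted below.)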
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataScanServiceClient, DataScanServiceAsyncClient +]) +@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) +@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) +def test_data_scan_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataScanServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest"), +]) +def test_data_scan_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", None), +]) +def test_data_scan_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_scan_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataScanServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_scan_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
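+ # Roughly: the client hands credentials_file through to the transport, which is expected to + # load it via google.auth.load_credentials_from_file and pass the resulting credentials on + # to grpc_helpers.create_channel, as the assertion below spells out.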
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.CreateDataScanRequest, + dict, +]) +def test_create_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.CreateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.CreateDataScanRequest( + parent='parent_value', + data_scan_id='data_scan_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
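+ # (Presumably assigning a plain string to the mocked return value's .name is enough to keep + # any operation plumbing satisfied, since no real Operation proto is constructed here.)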
+ client.create_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.CreateDataScanRequest( + parent='parent_value', + data_scan_id='data_scan_id_value', + ) + +def test_create_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc + request = {} + client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_scan] = mock_rpc + + request = {} + await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.CreateDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
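+ # FakeUnaryUnaryCall wraps the canned response so it can be awaited like a real grpc.aio + # unary-unary call; the async client then awaits it as if the reply came off the wire.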
+ with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.CreateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_scan_async_from_dict(): + await test_create_data_scan_async(request_type=dict) + +def test_create_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.CreateDataScanRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.CreateDataScanRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_scan( + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].data_scan_id + mock_val = 'data_scan_id_value' + assert arg == mock_val + + +def test_create_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_scan( + datascans.CreateDataScanRequest(), + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + +@pytest.mark.asyncio +async def test_create_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_scan( + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].data_scan_id + mock_val = 'data_scan_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_scan( + datascans.CreateDataScanRequest(), + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.UpdateDataScanRequest, + dict, +]) +def test_update_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.UpdateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.UpdateDataScanRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.UpdateDataScanRequest( + ) + +def test_update_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc + request = {} + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_scan] = mock_rpc + + request = {} + await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.UpdateDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.UpdateDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_scan_async_from_dict(): + await test_update_data_scan_async(request_type=dict) + +def test_update_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.UpdateDataScanRequest() + + request.data_scan.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
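+ # The routing header value is serialized from the request field as "<field path>=<value>", + # so for UpdateDataScan the expected entry is data_scan.name=name_value, as asserted below.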
+ with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_scan.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.UpdateDataScanRequest() + + request.data_scan.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_scan.name=name_value', + ) in kw['metadata'] + + +def test_update_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_scan( + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_scan( + datascans.UpdateDataScanRequest(), + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_scan( + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_scan + mock_val = datascans.DataScan(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_scan( + datascans.UpdateDataScanRequest(), + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.DeleteDataScanRequest, + dict, +]) +def test_delete_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.DeleteDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.DeleteDataScanRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.DeleteDataScanRequest( + name='name_value', + ) + +def test_delete_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc + request = {} + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_scan] = mock_rpc + + request = {} + await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.DeleteDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
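+ # DeleteDataScan is a long-running operation, so the surfaced response should be an + # operation future rather than a raw Operation proto; only the type is checked here + # because the LRO machinery itself is mocked out.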
+ with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.DeleteDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_scan_async_from_dict(): + await test_delete_data_scan_async(request_type=dict) + +def test_delete_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.DeleteDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.DeleteDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_scan( + datascans.DeleteDataScanRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_scan( + datascans.DeleteDataScanRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.GetDataScanRequest, + dict, +]) +def test_get_data_scan(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + ) + response = client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.GetDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect.
+ assert isinstance(response, datascans.DataScan) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +def test_get_data_scan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.GetDataScanRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_scan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.GetDataScanRequest( + name='name_value', + ) + +def test_get_data_scan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc + request = {} + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_scan in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_scan] = mock_rpc + + request = {} + await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + )) + response = await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.GetDataScanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datascans.DataScan) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +@pytest.mark.asyncio +async def test_get_data_scan_async_from_dict(): + await test_get_data_scan_async(request_type=dict) + +def test_get_data_scan_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GetDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value = datascans.DataScan() + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_data_scan_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.GetDataScanRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan()) + await client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_data_scan_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datascans.DataScan() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_data_scan_flattened_error(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_scan( + datascans.GetDataScanRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_data_scan_flattened_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_scan( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_data_scan_flattened_error_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_scan( + datascans.GetDataScanRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datascans.ListDataScansRequest, + dict, +]) +def test_list_data_scans(request_type, transport: str = 'grpc'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + # Designate an appropriate return value for the call.
+ call.return_value = datascans.ListDataScansResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datascans.ListDataScansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataScansPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_data_scans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datascans.ListDataScansRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_scans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datascans.ListDataScansRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_scans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc + request = {} + client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. 
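+ # (Because the mock replaced the entry in _wrapped_methods, the call counts below track + # invocations that went through the cached-wrapper lookup rather than a fresh wrap.)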
+ assert mock_rpc.call_count == 1 + + client.list_data_scans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_scans_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_data_scans in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_data_scans] = mock_rpc + + request = {} + await client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_scans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_scans_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScansRequest): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datascans.ListDataScansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataScansAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_data_scans_async_from_dict(): + await test_list_data_scans_async(request_type=dict) + +def test_list_data_scans_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datascans.ListDataScansRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + call.return_value = datascans.ListDataScansResponse() + client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_scans_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScansRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
+        await client.list_data_scans(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_scans_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.ListDataScansResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_scans(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_scans_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scans(
+            datascans.ListDataScansRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_scans_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_scans(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
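+        # (What the attribute check below relies on: the flattened `parent=`
+        # keyword is merged into a newly built ListDataScansRequest before the
+        # transport is invoked, so `args[0]` is that request message.)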
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_scans_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_scans(
+            datascans.ListDataScansRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scans_pager(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_scans(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScan)
+                   for i in results)
+
+
+def test_list_data_scans_pages(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_data_scans(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async_pager():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
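+        # (The trailing RuntimeError below is a sentinel: the pager must stop
+        # at the final page, whose `next_page_token` is empty, so the sentinel
+        # only fires if the pager over-fetches.)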
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_data_scans(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, datascans.DataScan)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_data_scans_async_pages():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scans),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in ( # pragma: no branch
+            await client.list_data_scans(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+  datascans.RunDataScanRequest,
+  dict,
+])
+def test_run_data_scan(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.RunDataScanResponse(
+        )
+        response = client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = datascans.RunDataScanRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.RunDataScanResponse)
+
+
+def test_run_data_scan_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
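+    # (Roughly, per AIP-4235: fields annotated for auto-population, request-id
+    # style UUID4 strings, are filled in by the client when left unset; the
+    # ordinary string fields set below must pass through unchanged, which is
+    # what the equality assertion at the end of this test checks.)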
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = datascans.RunDataScanRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.run_data_scan(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datascans.RunDataScanRequest(
+            name='name_value',
+        )
+
+def test_run_data_scan_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.run_data_scan in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc
+        request = {}
+        client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.run_data_scan(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_run_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.run_data_scan in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.run_data_scan] = mock_rpc
+
+        request = {}
+        await client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
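+        # (An AsyncMock stands in here because the async client awaits the
+        # wrapped callable; the plain Mock used in the sync variant above
+        # would hand back a non-awaitable and the await would fail.)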
+        assert mock_rpc.call_count == 1
+
+        await client.run_data_scan(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_run_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.RunDataScanRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse(
+        ))
+        response = await client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.RunDataScanRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.RunDataScanResponse)
+
+
+@pytest.mark.asyncio
+async def test_run_data_scan_async_from_dict():
+    await test_run_data_scan_async(request_type=dict)
+
+def test_run_data_scan_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.RunDataScanRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value = datascans.RunDataScanResponse()
+        client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_run_data_scan_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.RunDataScanRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
+        await client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_run_data_scan_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
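+    # (Same patching pattern as every gRPC test in this file: `__call__` is
+    # replaced on the type of the transport's multicallable, which intercepts
+    # the invocation itself while leaving channel setup untouched.)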
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.RunDataScanResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.run_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_run_data_scan_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.run_data_scan(
+            datascans.RunDataScanRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_run_data_scan_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.run_data_scan),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_data_scan(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_run_data_scan_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_data_scan(
+            datascans.RunDataScanRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  datascans.GetDataScanJobRequest,
+  dict,
+])
+def test_get_data_scan_job(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.DataScanJob(
+            name='name_value',
+            uid='uid_value',
+            state=datascans.DataScanJob.State.RUNNING,
+            message='message_value',
+            type_=datascans.DataScanType.DATA_QUALITY,
+        )
+        response = client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GetDataScanJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
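+    # (A note on `type_`: proto-plus appends an underscore to field names that
+    # would shadow Python built-ins, so the proto field `type` surfaces as
+    # `type_` in the assertions below.)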
+    assert isinstance(response, datascans.DataScanJob)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == datascans.DataScanJob.State.RUNNING
+    assert response.message == 'message_value'
+    assert response.type_ == datascans.DataScanType.DATA_QUALITY
+
+
+def test_get_data_scan_job_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = datascans.GetDataScanJobRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_data_scan_job(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datascans.GetDataScanJobRequest(
+            name='name_value',
+        )
+
+def test_get_data_scan_job_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_data_scan_job in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc
+        request = {}
+        client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_data_scan_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_data_scan_job in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_data_scan_job] = mock_rpc
+
+        request = {}
+        await client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_data_scan_job(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanJobRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob(
+            name='name_value',
+            uid='uid_value',
+            state=datascans.DataScanJob.State.RUNNING,
+            message='message_value',
+            type_=datascans.DataScanType.DATA_QUALITY,
+        ))
+        response = await client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GetDataScanJobRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.DataScanJob)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == datascans.DataScanJob.State.RUNNING
+    assert response.message == 'message_value'
+    assert response.type_ == datascans.DataScanType.DATA_QUALITY
+
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_async_from_dict():
+    await test_get_data_scan_job_async(request_type=dict)
+
+def test_get_data_scan_job_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GetDataScanJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value = datascans.DataScanJob()
+        client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GetDataScanJobRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
+        await client.get_data_scan_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
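+        # (FakeUnaryUnaryCall, used throughout the async tests, wraps the
+        # response message in an awaitable that mimics a grpc.aio unary-unary
+        # call, which is what lets the async client `await` the patched stub.)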
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_scan_job_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.DataScanJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_scan_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_scan_job_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_scan_job(
+            datascans.GetDataScanJobRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_scan_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_scan_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_scan_job_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_scan_job(
+            datascans.GetDataScanJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  datascans.ListDataScanJobsRequest,
+  dict,
+])
+def test_list_data_scan_jobs(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.ListDataScanJobsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = datascans.ListDataScanJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataScanJobsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = datascans.ListDataScanJobsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_data_scan_jobs(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datascans.ListDataScanJobsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            filter='filter_value',
+        )
+
+def test_list_data_scan_jobs_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc
+        request = {}
+        client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_data_scan_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_scan_jobs in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_scan_jobs] = mock_rpc
+
+        request = {}
+        await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_scan_jobs(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScanJobsRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.ListDataScanJobsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataScanJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_from_dict():
+    await test_list_data_scan_jobs_async(request_type=dict)
+
+def test_list_data_scan_jobs_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScanJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value = datascans.ListDataScanJobsResponse()
+        client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.ListDataScanJobsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
+        await client.list_data_scan_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_scan_jobs_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.ListDataScanJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_scan_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_scan_jobs_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scan_jobs(
+            datascans.ListDataScanJobsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_scan_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_scan_jobs(
+            datascans.ListDataScanJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scan_jobs_pager(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_scan_jobs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScanJob)
+                   for i in results)
+
+
+def test_list_data_scan_jobs_pages(transport_name: str = "grpc"):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_data_scan_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_pager():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_data_scan_jobs(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, datascans.DataScanJob)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_data_scan_jobs_async_pages():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in ( # pragma: no branch
+            await client.list_data_scan_jobs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+  datascans.GenerateDataQualityRulesRequest,
+  dict,
+])
+def test_generate_data_quality_rules(request_type, transport: str = 'grpc'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.GenerateDataQualityRulesResponse(
+        )
+        response = client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GenerateDataQualityRulesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.GenerateDataQualityRulesResponse)
+
+
+def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = datascans.GenerateDataQualityRulesRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.generate_data_quality_rules(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datascans.GenerateDataQualityRulesRequest(
+            name='name_value',
+        )
+
+def test_generate_data_quality_rules_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc
+        request = {}
+        client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.generate_data_quality_rules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataScanServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.generate_data_quality_rules in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.generate_data_quality_rules] = mock_rpc
+
+        request = {}
+        await client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.generate_data_quality_rules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_async(transport: str = 'grpc_asyncio', request_type=datascans.GenerateDataQualityRulesRequest):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse(
+        ))
+        response = await client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = datascans.GenerateDataQualityRulesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.GenerateDataQualityRulesResponse)
+
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_async_from_dict():
+    await test_generate_data_quality_rules_async(request_type=dict)
+
+def test_generate_data_quality_rules_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GenerateDataQualityRulesRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        call.return_value = datascans.GenerateDataQualityRulesResponse()
+        client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datascans.GenerateDataQualityRulesRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
+        await client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_generate_data_quality_rules_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datascans.GenerateDataQualityRulesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.generate_data_quality_rules(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_generate_data_quality_rules_flattened_error():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.generate_data_quality_rules(
+            datascans.GenerateDataQualityRulesRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_flattened_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.generate_data_quality_rules(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_generate_data_quality_rules_flattened_error_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.generate_data_quality_rules(
+            datascans.GenerateDataQualityRulesRequest(),
+            name='name_value',
+        )
+
+
+def test_create_data_scan_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_data_scan in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc
+
+        request = {}
+        client.create_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_data_scan(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_data_scan_rest_required_fields(request_type=datascans.CreateDataScanRequest):
+    transport_class = transports.DataScanServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["data_scan_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "dataScanId" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_scan._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "dataScanId" in jsonified_request
+    assert jsonified_request["dataScanId"] == request_init["data_scan_id"]
+
+    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["dataScanId"] = 'data_scan_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_scan._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("data_scan_id", "validate_only", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "dataScanId" in jsonified_request
+    assert jsonified_request["dataScanId"] == 'data_scan_id_value'
+
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
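+    # (REST test strategy in outline: `requests.Session.request` is patched so
+    # no HTTP leaves the process, and `path_template.transcode` is patched
+    # below so the placeholder request survives http_options validation.)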
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_scan(request) + + expected_params = [ + ( + "dataScanId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_data_scan_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_data_scan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("dataScanId", "validateOnly", )) & set(("parent", "dataScan", "dataScanId", ))) + + +def test_create_data_scan_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_scan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataScans" % client.transport._host, args[1]) + + +def test_create_data_scan_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_scan( + datascans.CreateDataScanRequest(), + parent='parent_value', + data_scan=datascans.DataScan(name='name_value'), + data_scan_id='data_scan_id_value', + ) + + +def test_update_data_scan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc + + request = {} + client.update_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_scan_rest_required_fields(request_type=datascans.UpdateDataScanRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_scan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_scan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
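+            # (For UpdateDataScan the transcoded request also carries a body,
+            # since the resource is sent as the HTTP PATCH payload.)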
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_scan(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_data_scan_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_data_scan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("dataScan", ))) + + +def test_update_data_scan_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_scan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{data_scan.name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) + + +def test_update_data_scan_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_scan( + datascans.UpdateDataScanRequest(), + data_scan=datascans.DataScan(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_data_scan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc + + request = {} + client.delete_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_scan_rest_required_fields(request_type=datascans.DeleteDataScanRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_scan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_scan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
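+            # DeleteDataScan maps to an HTTP DELETE with no request body, so
+            # the stubbed transcode result omits the 'body' key.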
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_scan(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_data_scan_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_data_scan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force", )) & set(("name", ))) + + +def test_delete_data_scan_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_scan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) + + +def test_delete_data_scan_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_scan( + datascans.DeleteDataScanRequest(), + name='name_value', + ) + + +def test_get_data_scan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc + + request = {} + client.get_data_scan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_scan_rest_required_fields(request_type=datascans.GetDataScanRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.DataScan() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.DataScan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_scan(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_data_scan_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_data_scan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view", )) & set(("name", ))) + + +def test_get_data_scan_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.DataScan() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datascans.DataScan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_scan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) + + +def test_get_data_scan_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_scan( + datascans.GetDataScanRequest(), + name='name_value', + ) + + +def test_list_data_scans_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc + + request = {} + client.list_data_scans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_scans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_scans_rest_required_fields(request_type=datascans.ListDataScansRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scans._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scans._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.ListDataScansResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = datascans.ListDataScansResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_data_scans(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_data_scans_rest_unset_required_fields():
+    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_data_scans._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_data_scans_rest_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = datascans.ListDataScansResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = datascans.ListDataScansResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_data_scans(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataScans" % client.transport._host, args[1])
+
+
+def test_list_data_scans_rest_flattened_error(transport: str = 'rest'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scans(
+            datascans.ListDataScansRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scans_rest_pager(transport: str = 'rest'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
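+        # The pager issues one HTTP request per page, following
+        # next_page_token until a page without a token is returned.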
+        # Set the response as a series of pages
+        response = (
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScansResponse(
+                data_scans=[
+                    datascans.DataScan(),
+                    datascans.DataScan(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(datascans.ListDataScansResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        pager = client.list_data_scans(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScan)
+                for i in results)
+
+        pages = list(client.list_data_scans(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_run_data_scan_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.run_data_scan in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc
+
+        request = {}
+        client.run_data_scan(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.run_data_scan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_run_data_scan_rest_required_fields(request_type=datascans.RunDataScanRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_data_scan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_data_scan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.RunDataScanResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.RunDataScanResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.run_data_scan(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_run_data_scan_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.run_data_scan._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_run_data_scan_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
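+        # RunDataScan responds synchronously (it is not a long-running
+        # operation), so a plain response message is faked here.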
+ return_value = datascans.RunDataScanResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datascans.RunDataScanResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.run_data_scan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}:run" % client.transport._host, args[1]) + + +def test_run_data_scan_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_data_scan( + datascans.RunDataScanRequest(), + name='name_value', + ) + + +def test_get_data_scan_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_scan_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc + + request = {} + client.get_data_scan_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_scan_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_scan_job_rest_required_fields(request_type=datascans.GetDataScanJobRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.DataScanJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.DataScanJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_scan_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_data_scan_job_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_data_scan_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view", )) & set(("name", ))) + + +def test_get_data_scan_job_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
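+    # `client.transport._session` is the transport's authorized session;
+    # patching `request` on its type intercepts the outgoing REST call.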
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.DataScanJob() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datascans.DataScanJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_scan_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}" % client.transport._host, args[1]) + + +def test_get_data_scan_job_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_scan_job( + datascans.GetDataScanJobRequest(), + name='name_value', + ) + + +def test_list_data_scan_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc + + request = {} + client.list_data_scan_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_scan_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_scan_jobs_rest_required_fields(request_type=datascans.ListDataScanJobsRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scan_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scan_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.ListDataScanJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = datascans.ListDataScanJobsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_data_scan_jobs(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_data_scan_jobs_rest_unset_required_fields():
+    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_data_scan_jobs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_data_scan_jobs_rest_flattened():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = datascans.ListDataScanJobsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = datascans.ListDataScanJobsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_data_scan_jobs(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataScans/*}/jobs" % client.transport._host, args[1])
+
+
+def test_list_data_scan_jobs_rest_flattened_error(transport: str = 'rest'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_scan_jobs(
+            datascans.ListDataScanJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_scan_jobs_rest_pager(transport: str = 'rest'):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='abc',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[],
+                next_page_token='def',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            datascans.ListDataScanJobsResponse(
+                data_scan_jobs=[
+                    datascans.DataScanJob(),
+                    datascans.DataScanJob(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(datascans.ListDataScanJobsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'}
+
+        pager = client.list_data_scan_jobs(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, datascans.DataScanJob)
+                for i in results)
+
+        pages = list(client.list_data_scan_jobs(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_generate_data_quality_rules_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc
+
+        request = {}
+        client.generate_data_quality_rules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.generate_data_quality_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_data_quality_rules_rest_required_fields(request_type=datascans.GenerateDataQualityRulesRequest): + transport_class = transports.DataScanServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_data_quality_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_data_quality_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datascans.GenerateDataQualityRulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.generate_data_quality_rules(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_generate_data_quality_rules_rest_unset_required_fields(): + transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.generate_data_quality_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_generate_data_quality_rules_rest_flattened(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.GenerateDataQualityRulesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.generate_data_quality_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}:generateDataQualityRules" % client.transport._host, args[1]) + + +def test_generate_data_quality_rules_rest_flattened_error(transport: str = 'rest'): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_data_quality_rules( + datascans.GenerateDataQualityRulesRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataScanServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataScanServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataScanServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataScanServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
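+    # When a transport is supplied, the client adopts it as-is instead of
+    # constructing one from credentials or client options.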
+ transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataScanServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataScanServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataScanServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataScanServiceGrpcTransport, + transports.DataScanServiceGrpcAsyncIOTransport, + transports.DataScanServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataScanServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_scan_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.CreateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_scan_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.UpdateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_scan_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
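+    # delete_data_scan is a long-running method, so the stub is faked with an
+    # operations_pb2.Operation rather than a typed response message.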
+ with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.DeleteDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_scan_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + call.return_value = datascans.DataScan() + client.get_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_scans_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + call.return_value = datascans.ListDataScansResponse() + client.list_data_scans(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.ListDataScansRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_data_scan_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_data_scan), + '__call__') as call: + call.return_value = datascans.RunDataScanResponse() + client.run_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.RunDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_scan_job_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan_job), + '__call__') as call: + call.return_value = datascans.DataScanJob() + client.get_data_scan_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
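+# With request=None the client synthesizes a default request message, so the
+# assertion below compares args[0] against an empty ListDataScanJobsRequest().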
+def test_list_data_scan_jobs_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scan_jobs), + '__call__') as call: + call.return_value = datascans.ListDataScanJobsResponse() + client.list_data_scan_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.ListDataScanJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_data_quality_rules_empty_call_grpc(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + call.return_value = datascans.GenerateDataQualityRulesResponse() + client.generate_data_quality_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GenerateDataQualityRulesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataScanServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_scan_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.CreateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_scan_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_scan(request=None) + + # Establish that the underlying stub method was called. 
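+    # Each mock_calls entry unpacks as (name, args, kwargs), so args[0] below
+    # is the request message the transport actually sent.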
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.UpdateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_scan_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.DeleteDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_scan_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + )) + await client.get_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_scans_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_data_scans(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.ListDataScansRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_data_scan_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
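+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an
+    # awaitable, standing in for a real async unary-unary gRPC invocation.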
+ with mock.patch.object( + type(client.transport.run_data_scan), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse( + )) + await client.run_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.RunDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_scan_job_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob( + name='name_value', + uid='uid_value', + state=datascans.DataScanJob.State.RUNNING, + message='message_value', + type_=datascans.DataScanType.DATA_QUALITY, + )) + await client.get_data_scan_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_scan_jobs_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scan_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse( + next_page_token='next_page_token_value', + )) + await client.list_data_scan_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.ListDataScanJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_data_quality_rules_empty_call_grpc_asyncio(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_data_quality_rules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse( + )) + await client.generate_data_quality_rules(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GenerateDataQualityRulesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataScanServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_data_scan_rest_bad_request(request_type=datascans.CreateDataScanRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_scan(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.CreateDataScanRequest, + dict, +]) +def test_create_data_scan_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["data_scan"] = {'name': 'name_value', 'uid': 'uid_value', 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'data': {'entity': 'entity_value', 'resource': 'resource_value'}, 'execution_spec': {'trigger': {'on_demand': {}, 'schedule': {'cron': 'cron_value'}}, 'field': 'field_value'}, 'execution_status': {'latest_job_start_time': {}, 'latest_job_end_time': {}, 'latest_job_create_time': {}}, 'type_': 1, 'data_quality_spec': {'rules': [{'range_expectation': {'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'non_null_expectation': {}, 'set_expectation': {'values': ['values_value1', 'values_value2']}, 'regex_expectation': {'regex': 'regex_value'}, 'uniqueness_expectation': {}, 'statistic_range_expectation': {'statistic': 1, 'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'row_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'table_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'sql_assertion': {'sql_statement': 'sql_statement_value'}, 'column': 'column_value', 'ignore_null': True, 'dimension': 'dimension_value', 'threshold': 0.973, 'name': 'name_value', 'description': 'description_value', 'suspended': True}], 'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}, 'notification_report': {'recipients': {'emails': ['emails_value1', 'emails_value2']}, 'score_threshold_trigger': {'score_threshold': 0.1608}, 'job_failure_trigger': {}, 'job_end_trigger': {}}}, 'catalog_publishing_enabled': True}, 'data_profile_spec': {'sampling_percent': 0.17070000000000002, 'row_filter': 
'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}}, 'include_fields': {'field_names': ['field_names_value1', 'field_names_value2']}, 'exclude_fields': {}}, 'data_discovery_spec': {'bigquery_publishing_config': {'table_type': 1, 'connection': 'connection_value', 'location': 'location_value', 'project': 'project_value'}, 'storage_config': {'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'type_inference_disabled': True, 'quote': 'quote_value'}, 'json_options': {'encoding': 'encoding_value', 'type_inference_disabled': True}}}, 'data_quality_result': {'passed': True, 'score': 0.54, 'dimensions': [{'dimension': {'name': 'name_value'}, 'passed': True, 'score': 0.54}], 'columns': [{'column': 'column_value', 'score': 0.54, 'passed': True, 'dimensions': {}}], 'rules': [{'rule': {}, 'passed': True, 'evaluated_count': 1603, 'passed_count': 1288, 'null_count': 1091, 'pass_ratio': 0.1077, 'failing_rows_query': 'failing_rows_query_value', 'assertion_row_count': 2071}], 'row_count': 992, 'scanned_data': {'incremental_field': {'field': 'field_value', 'start': 'start_value', 'end': 'end_value'}}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}, 'catalog_publishing_status': {'state': 1}}, 'data_profile_result': {'row_count': 992, 'profile': {'fields': [{'name': 'name_value', 'type_': 'type__value', 'mode': 'mode_value', 'profile': {'null_ratio': 0.1081, 'distinct_ratio': 0.1504, 'top_n_values': [{'value': 'value_value', 'count': 553, 'ratio': 0.543}], 'string_profile': {'min_length': 1061, 'max_length': 1063, 'average_length': 0.1468}, 'integer_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 419, 'quartiles': [987, 988], 'max_': 421}, 'double_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 0.419, 'quartiles': [0.987, 0.988], 'max_': 0.421}}}]}, 'scanned_data': {}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}}, 'data_discovery_result': {'bigquery_publishing': {'dataset': 'dataset_value', 'location': 'location_value'}, 'scan_statistics': {'scanned_file_count': 1891, 'data_processed_bytes': 2119, 'files_excluded': 1472, 'tables_created': 1458, 'tables_deleted': 1457, 'tables_updated': 1473, 'filesets_created': 1686, 'filesets_deleted': 1685, 'filesets_updated': 1701}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datascans.CreateDataScanRequest.meta.fields["data_scan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
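+        # Proto-plus messages expose their schema via `meta.fields`, while
+        # vanilla protobuf messages carry a `DESCRIPTOR`; the hasattr() probe
+        # below uses that difference to tell the two runtimes apart.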
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_scan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_scan"][field])): + del request_init["data_scan"][field][i][subfield] + else: + del request_init["data_scan"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_data_scan(request) + + # Establish that the response is the type that we expect. 
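+    # create_data_scan returns a long-running Operation, so there is no typed
+    # response message to inspect here; the check is limited to round-tripping
+    # the Operation through JSON serialization.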
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_scan_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_create_data_scan") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_create_data_scan_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_create_data_scan") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.CreateDataScanRequest.pb(datascans.CreateDataScanRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = datascans.CreateDataScanRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_data_scan_rest_bad_request(request_type=datascans.UpdateDataScanRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
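+    # A 400 status on the mocked session is surfaced by the REST transport as
+    # core_exceptions.BadRequest, which the pytest.raises() context captures.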
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_data_scan(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.UpdateDataScanRequest, + dict, +]) +def test_update_data_scan_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} + request_init["data_scan"] = {'name': 'projects/sample1/locations/sample2/dataScans/sample3', 'uid': 'uid_value', 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'data': {'entity': 'entity_value', 'resource': 'resource_value'}, 'execution_spec': {'trigger': {'on_demand': {}, 'schedule': {'cron': 'cron_value'}}, 'field': 'field_value'}, 'execution_status': {'latest_job_start_time': {}, 'latest_job_end_time': {}, 'latest_job_create_time': {}}, 'type_': 1, 'data_quality_spec': {'rules': [{'range_expectation': {'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'non_null_expectation': {}, 'set_expectation': {'values': ['values_value1', 'values_value2']}, 'regex_expectation': {'regex': 'regex_value'}, 'uniqueness_expectation': {}, 'statistic_range_expectation': {'statistic': 1, 'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'row_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'table_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'sql_assertion': {'sql_statement': 'sql_statement_value'}, 'column': 'column_value', 'ignore_null': True, 'dimension': 'dimension_value', 'threshold': 0.973, 'name': 'name_value', 'description': 'description_value', 'suspended': True}], 'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}, 'notification_report': {'recipients': {'emails': ['emails_value1', 'emails_value2']}, 'score_threshold_trigger': {'score_threshold': 0.1608}, 'job_failure_trigger': {}, 'job_end_trigger': {}}}, 'catalog_publishing_enabled': True}, 'data_profile_spec': {'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}}, 'include_fields': {'field_names': ['field_names_value1', 'field_names_value2']}, 'exclude_fields': {}}, 'data_discovery_spec': {'bigquery_publishing_config': {'table_type': 1, 'connection': 'connection_value', 'location': 'location_value', 'project': 'project_value'}, 'storage_config': {'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'type_inference_disabled': True, 'quote': 'quote_value'}, 'json_options': 
{'encoding': 'encoding_value', 'type_inference_disabled': True}}}, 'data_quality_result': {'passed': True, 'score': 0.54, 'dimensions': [{'dimension': {'name': 'name_value'}, 'passed': True, 'score': 0.54}], 'columns': [{'column': 'column_value', 'score': 0.54, 'passed': True, 'dimensions': {}}], 'rules': [{'rule': {}, 'passed': True, 'evaluated_count': 1603, 'passed_count': 1288, 'null_count': 1091, 'pass_ratio': 0.1077, 'failing_rows_query': 'failing_rows_query_value', 'assertion_row_count': 2071}], 'row_count': 992, 'scanned_data': {'incremental_field': {'field': 'field_value', 'start': 'start_value', 'end': 'end_value'}}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}, 'catalog_publishing_status': {'state': 1}}, 'data_profile_result': {'row_count': 992, 'profile': {'fields': [{'name': 'name_value', 'type_': 'type__value', 'mode': 'mode_value', 'profile': {'null_ratio': 0.1081, 'distinct_ratio': 0.1504, 'top_n_values': [{'value': 'value_value', 'count': 553, 'ratio': 0.543}], 'string_profile': {'min_length': 1061, 'max_length': 1063, 'average_length': 0.1468}, 'integer_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 419, 'quartiles': [987, 988], 'max_': 421}, 'double_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 0.419, 'quartiles': [0.987, 0.988], 'max_': 0.421}}}]}, 'scanned_data': {}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}}, 'data_discovery_result': {'bigquery_publishing': {'dataset': 'dataset_value', 'location': 'location_value'}, 'scan_statistics': {'scanned_file_count': 1891, 'data_processed_bytes': 2119, 'files_excluded': 1472, 'tables_created': 1458, 'tables_deleted': 1457, 'tables_updated': 1473, 'filesets_created': 1686, 'filesets_deleted': 1685, 'filesets_updated': 1701}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = datascans.UpdateDataScanRequest.meta.fields["data_scan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_scan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_scan"][field])): + del request_init["data_scan"][field][i][subfield] + else: + del request_init["data_scan"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_scan(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_scan_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_update_data_scan") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_update_data_scan_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_update_data_scan") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.UpdateDataScanRequest.pb(datascans.UpdateDataScanRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = datascans.UpdateDataScanRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_data_scan_rest_bad_request(request_type=datascans.DeleteDataScanRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_data_scan(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.DeleteDataScanRequest, + dict, +]) +def test_delete_data_scan_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_data_scan(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_scan_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_delete_data_scan") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_delete_data_scan_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_delete_data_scan") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.DeleteDataScanRequest.pb(datascans.DeleteDataScanRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = datascans.DeleteDataScanRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_data_scan_rest_bad_request(request_type=datascans.GetDataScanRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_scan(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.GetDataScanRequest, + dict, +]) +def test_get_data_scan_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.DataScan( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + type_=datascans.DataScanType.DATA_QUALITY, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.DataScan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_scan(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datascans.DataScan) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == datascans.DataScanType.DATA_QUALITY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_scan_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_get_data_scan") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.GetDataScanRequest.pb(datascans.GetDataScanRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = datascans.DataScan.to_json(datascans.DataScan()) + req.return_value.content = return_value + + request = datascans.GetDataScanRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datascans.DataScan() + post_with_metadata.return_value = datascans.DataScan(), metadata + + client.get_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_data_scans_rest_bad_request(request_type=datascans.ListDataScansRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_scans(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.ListDataScansRequest, + dict, +]) +def test_list_data_scans_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.ListDataScansResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.ListDataScansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_scans(request) + + # Establish that the response is the type that we expect. 
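+    # List responses are wrapped in a pager for transparent pagination, hence
+    # the ListDataScansPager check rather than a raw ListDataScansResponse.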
+ assert isinstance(response, pagers.ListDataScansPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_scans_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scans") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scans_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_list_data_scans") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.ListDataScansRequest.pb(datascans.ListDataScansRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = datascans.ListDataScansResponse.to_json(datascans.ListDataScansResponse()) + req.return_value.content = return_value + + request = datascans.ListDataScansRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datascans.ListDataScansResponse() + post_with_metadata.return_value = datascans.ListDataScansResponse(), metadata + + client.list_data_scans(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_run_data_scan_rest_bad_request(request_type=datascans.RunDataScanRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.run_data_scan(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.RunDataScanRequest, + dict, +]) +def test_run_data_scan_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
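+    # No fields are set on the fake RunDataScanResponse, so the isinstance()
+    # check at the end is the meaningful assertion for this method.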
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.RunDataScanResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.RunDataScanResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.run_data_scan(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datascans.RunDataScanResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_data_scan_rest_interceptors(null_interceptor): + transport = transports.DataScanServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), + ) + client = DataScanServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_run_data_scan") as post, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "post_run_data_scan_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_run_data_scan") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datascans.RunDataScanRequest.pb(datascans.RunDataScanRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = datascans.RunDataScanResponse.to_json(datascans.RunDataScanResponse()) + req.return_value.content = return_value + + request = datascans.RunDataScanRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datascans.RunDataScanResponse() + post_with_metadata.return_value = datascans.RunDataScanResponse(), metadata + + client.run_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_data_scan_job_rest_bad_request(request_type=datascans.GetDataScanJobRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_scan_job(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.GetDataScanJobRequest, + dict, +]) +def test_get_data_scan_job_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.DataScanJob( + name='name_value', + uid='uid_value', + state=datascans.DataScanJob.State.RUNNING, + message='message_value', + type_=datascans.DataScanType.DATA_QUALITY, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.DataScanJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_scan_job(request) + + # Establish that the response is the type that we expect. 
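+    # Every field set on the mocked DataScanJob above should survive the
+    # protobuf -> JSON -> protobuf round trip and reappear on the response.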
+    assert isinstance(response, datascans.DataScanJob)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.state == datascans.DataScanJob.State.RUNNING
+    assert response.message == 'message_value'
+    assert response.type_ == datascans.DataScanType.DATA_QUALITY
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_data_scan_job_rest_interceptors(null_interceptor):
+    transport = transports.DataScanServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(),
+    )
+    client = DataScanServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_job") as post, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_job_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_get_data_scan_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = datascans.GetDataScanJobRequest.pb(datascans.GetDataScanJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = datascans.DataScanJob.to_json(datascans.DataScanJob())
+        req.return_value.content = return_value
+
+        request = datascans.GetDataScanJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datascans.DataScanJob()
+        post_with_metadata.return_value = datascans.DataScanJob(), metadata
+
+        client.get_data_scan_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_data_scan_jobs_rest_bad_request(request_type=datascans.ListDataScanJobsRequest):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_scan_jobs(request) + + +@pytest.mark.parametrize("request_type", [ + datascans.ListDataScanJobsRequest, + dict, +]) +def test_list_data_scan_jobs_rest_call_success(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = datascans.ListDataScanJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datascans.ListDataScanJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_scan_jobs(request) + + # Establish that the response is the type that we expect. 
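+    # The raw ListDataScanJobsResponse is wrapped in a pager, which can fetch
+    # follow-up pages lazily via next_page_token.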
+    assert isinstance(response, pagers.ListDataScanJobsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_data_scan_jobs_rest_interceptors(null_interceptor):
+    transport = transports.DataScanServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(),
+    )
+    client = DataScanServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scan_jobs") as post, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scan_jobs_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_list_data_scan_jobs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = datascans.ListDataScanJobsRequest.pb(datascans.ListDataScanJobsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = datascans.ListDataScanJobsResponse.to_json(datascans.ListDataScanJobsResponse())
+        req.return_value.content = return_value
+
+        request = datascans.ListDataScanJobsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datascans.ListDataScanJobsResponse()
+        post_with_metadata.return_value = datascans.ListDataScanJobsResponse(), metadata
+
+        client.list_data_scan_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_generate_data_quality_rules_rest_bad_request(request_type=datascans.GenerateDataQualityRulesRequest):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.generate_data_quality_rules(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    datascans.GenerateDataQualityRulesRequest,
+    dict,
+])
+def test_generate_data_quality_rules_rest_call_success(request_type):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = datascans.GenerateDataQualityRulesResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.generate_data_quality_rules(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datascans.GenerateDataQualityRulesResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_generate_data_quality_rules_rest_interceptors(null_interceptor):
+    transport = transports.DataScanServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(),
+    )
+    client = DataScanServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_generate_data_quality_rules") as post, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "post_generate_data_quality_rules_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_generate_data_quality_rules") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = datascans.GenerateDataQualityRulesRequest.pb(datascans.GenerateDataQualityRulesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = datascans.GenerateDataQualityRulesResponse.to_json(datascans.GenerateDataQualityRulesResponse())
+        req.return_value.content = return_value
+
+        request = datascans.GenerateDataQualityRulesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datascans.GenerateDataQualityRulesResponse()
+        post_with_metadata.return_value = datascans.GenerateDataQualityRulesResponse(), metadata
+
+        client.generate_data_quality_rules(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
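+    # (The locations mixin reuses the same Session-level mocking pattern as
+    # the generated RPC tests above.)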
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
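+    # Mixin list calls return the raw protobuf response; unlike the generated
+    # list RPCs above, there is no pager wrapper here.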
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
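+    # A 400 status on the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.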
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_scan_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_scan), + '__call__') as call: + client.create_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.CreateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_scan_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_scan), + '__call__') as call: + client.update_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.UpdateDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_scan_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
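+    # With request=None and no flattened fields, the client should build a
+    # default (empty) request message before hitting the stub.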
+ with mock.patch.object( + type(client.transport.delete_data_scan), + '__call__') as call: + client.delete_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.DeleteDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_scan_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan), + '__call__') as call: + client.get_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_scans_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_scans), + '__call__') as call: + client.list_data_scans(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.ListDataScansRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_data_scan_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_data_scan), + '__call__') as call: + client.run_data_scan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.RunDataScanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_scan_job_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_scan_job), + '__call__') as call: + client.get_data_scan_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datascans.GetDataScanJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_scan_jobs_empty_call_rest(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
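+    # Only the request construction is under test here; the stub is mocked,
+    # so nothing is actually sent.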
+    with mock.patch.object(
+            type(client.transport.list_data_scan_jobs),
+            '__call__') as call:
+        client.list_data_scan_jobs(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.ListDataScanJobsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_generate_data_quality_rules_empty_call_rest():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.generate_data_quality_rules),
+            '__call__') as call:
+        client.generate_data_quality_rules(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datascans.GenerateDataQualityRulesRequest()
+
+        assert args[0] == request_msg
+
+
+def test_data_scan_service_rest_lro_client():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.DataScanServiceGrpcTransport,
+    )
+
+def test_data_scan_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.DataScanServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_data_scan_service_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.DataScanServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
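+    # (Concrete gRPC/REST transports override these; the abstract base only
+    # defines the surface.)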
+    methods = (
+        'create_data_scan',
+        'update_data_scan',
+        'delete_data_scan',
+        'get_data_scan',
+        'list_data_scans',
+        'run_data_scan',
+        'get_data_scan_job',
+        'list_data_scan_jobs',
+        'generate_data_quality_rules',
+        'get_location',
+        'list_locations',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_data_scan_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataScanServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_data_scan_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataScanServiceTransport()
+        adc.assert_called_once()
+
+
+def test_data_scan_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DataScanServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataScanServiceGrpcTransport,
+        transports.DataScanServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_scan_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataScanServiceGrpcTransport,
+        transports.DataScanServiceGrpcAsyncIOTransport,
+        transports.DataScanServiceRestTransport,
+    ],
+)
+def test_data_scan_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataScanServiceGrpcTransport, grpc_helpers),
+        (transports.DataScanServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_data_scan_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport])
+def test_data_scan_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
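+    # A sketch of the callback contract assumed here (the module-level
+    # client_cert_source_callback fixture plays this role):
+    #
+    #     def client_cert_source_callback():
+    #         return b"cert bytes", b"key bytes"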
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_data_scan_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.DataScanServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_data_scan_service_host_no_port(transport_name):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_data_scan_service_host_with_port(transport_name):
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_data_scan_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DataScanServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DataScanServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_data_scan._session
+    session2 = client2.transport.create_data_scan._session
+    assert session1 != session2
+    session1 = client1.transport.update_data_scan._session
+    session2 = client2.transport.update_data_scan._session
+    assert session1 != session2
+    session1 = client1.transport.delete_data_scan._session
+    session2 = client2.transport.delete_data_scan._session
+    assert session1 != session2
+    session1 = client1.transport.get_data_scan._session
+    session2 = client2.transport.get_data_scan._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_scans._session
+    session2 = client2.transport.list_data_scans._session
+    assert session1 != session2
+    session1 = client1.transport.run_data_scan._session
+    session2 = client2.transport.run_data_scan._session
+    assert session1 != session2
+    session1 = client1.transport.get_data_scan_job._session
+    session2 = client2.transport.get_data_scan_job._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_scan_jobs._session
+    session2 = client2.transport.list_data_scan_jobs._session
+    assert session1 != session2
+    session1 = client1.transport.generate_data_quality_rules._session
+    session2 = client2.transport.generate_data_quality_rules._session
+    assert session1 != session2
+
+
+def test_data_scan_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataScanServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_scan_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataScanServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport])
+def test_data_scan_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport])
+def test_data_scan_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_data_scan_service_grpc_lro_client():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_scan_service_grpc_lro_async_client():
+    client = DataScanServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_connection_path():
+    project = "squid"
+    location = "clam"
+    connection = "whelk"
+    expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, )
+    actual = DataScanServiceClient.connection_path(project, location, connection)
+    assert expected == actual
+
+
+def test_parse_connection_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "connection": "nudibranch",
+    }
+    path = DataScanServiceClient.connection_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataScanServiceClient.parse_connection_path(path)
+    assert expected == actual
+
+def test_data_scan_path():
+    project = "cuttlefish"
+    location = "mussel"
+    dataScan = "winkle"
+    expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, )
+    actual = DataScanServiceClient.data_scan_path(project, location, dataScan)
+    assert expected == actual
+
+
+def test_parse_data_scan_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "dataScan": "abalone",
+    }
+    path = DataScanServiceClient.data_scan_path(**expected)
+
+    # Check that the path construction is reversible.
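+    # e.g. "projects/nautilus/locations/scallop/dataScans/abalone" should
+    # parse back into the expected dict above.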
+ actual = DataScanServiceClient.parse_data_scan_path(path) + assert expected == actual + +def test_data_scan_job_path(): + project = "squid" + location = "clam" + dataScan = "whelk" + job = "octopus" + expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) + actual = DataScanServiceClient.data_scan_job_path(project, location, dataScan, job) + assert expected == actual + + +def test_parse_data_scan_job_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "dataScan": "cuttlefish", + "job": "mussel", + } + path = DataScanServiceClient.data_scan_job_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_data_scan_job_path(path) + assert expected == actual + +def test_dataset_path(): + project = "winkle" + dataset = "nautilus" + expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) + actual = DataScanServiceClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "scallop", + "dataset": "abalone", + } + path = DataScanServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_dataset_path(path) + assert expected == actual + +def test_entity_path(): + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + entity = "oyster" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) + actual = DataScanServiceClient.entity_path(project, location, lake, zone, entity) + assert expected == actual + + +def test_parse_entity_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "lake": "mussel", + "zone": "winkle", + "entity": "nautilus", + } + path = DataScanServiceClient.entity_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_entity_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataScanServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = DataScanServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataScanServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = DataScanServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataScanServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataScanServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = DataScanServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = DataScanServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = DataScanServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataScanServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = DataScanServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataScanServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
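+    # DeleteOperation returns google.protobuf.Empty, which the client
+    # surfaces as None.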
+ assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
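+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the return value in an awaitable, standing in for a real async stub call.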
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
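+        # get_operation returns the raw operations_pb2.Operation message rather than an api_core future.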
+ assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
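+    # Patching __call__ on the multicallable's type intercepts the stub invocation itself,
+    # so the metadata asserted below is exactly what the transport sent.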
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataScanServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataScanServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, locations_pb2.Location)
+@pytest.mark.asyncio
+async def test_get_location_async(transport: str = "grpc_asyncio"):
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = DataScanServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = DataScanServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = DataScanServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataScanServiceClient, transports.DataScanServiceGrpcTransport), + (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py new file mode 100644 index 000000000000..5f1248bbf204 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -0,0 +1,14068 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError:  # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async  # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceAsyncClient
+from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceClient
+from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers
+from google.cloud.dataplex_v1.services.data_taxonomy_service import transports
+from google.cloud.dataplex_v1.types import data_taxonomy
+from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy
+from google.cloud.dataplex_v1.types import security
+from google.cloud.dataplex_v1.types import service
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(None) is None + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataTaxonomyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataTaxonomyServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataTaxonomyServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
DataTaxonomyServiceClient._get_client_cert_source(None, False) is None + assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DataTaxonomyServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataTaxonomyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataTaxonomyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataTaxonomyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataTaxonomyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataTaxonomyServiceClient._get_universe_domain(None, None) == DataTaxonomyServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataTaxonomyServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
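The universe-domain assertions above encode a simple precedence chain: an explicit client value wins over the `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment value, which wins over the `googleapis.com` default, and an explicit empty string is rejected. A minimal sketch of that resolution, using a hypothetical helper name rather than the client's private method:

```python
_DEFAULT_UNIVERSE = "googleapis.com"

def resolve_universe_domain(client_value, env_value):
    # An explicitly configured empty string is rejected outright.
    if client_value == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    # Client configuration beats the environment variable, which beats the default.
    return client_value or env_value or _DEFAULT_UNIVERSE

assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"
```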
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataTaxonomyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataTaxonomyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataTaxonomyServiceClient, "grpc"), + (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), + (DataTaxonomyServiceClient, "rest"), +]) +def test_data_taxonomy_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataTaxonomyServiceRestTransport, "rest"), +]) +def test_data_taxonomy_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataTaxonomyServiceClient, "grpc"), + (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), + (DataTaxonomyServiceClient, "rest"), +]) +def test_data_taxonomy_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_data_taxonomy_service_client_get_transport_class(): + transport = DataTaxonomyServiceClient.get_transport_class() + available_transports = [ + transports.DataTaxonomyServiceGrpcTransport, + transports.DataTaxonomyServiceRestTransport, + ] + assert transport in available_transports + + transport = DataTaxonomyServiceClient.get_transport_class("grpc") + assert transport == transports.DataTaxonomyServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest"), +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "true"), + 
(DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "false"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", "true"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_taxonomy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient +]) +@mock.patch.object(DataTaxonomyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient +]) +@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) +@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) +def test_data_taxonomy_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest"), +]) +def test_data_taxonomy_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", None), +]) +def test_data_taxonomy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_taxonomy_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataTaxonomyServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_taxonomy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + gcd_data_taxonomy.CreateDataTaxonomyRequest, + dict, +]) +def test_create_data_taxonomy(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_taxonomy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest( + parent='parent_value', + data_taxonomy_id='data_taxonomy_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_data_taxonomy.CreateDataTaxonomyRequest( + parent='parent_value', + data_taxonomy_id='data_taxonomy_id_value', + ) + +def test_create_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc + request = {} + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_taxonomy] = mock_rpc + + request = {} + await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_taxonomy_async_from_dict(): + await test_create_data_taxonomy_async(request_type=dict) + +def test_create_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_taxonomy_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.create_data_taxonomy(
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_taxonomy_id
+        mock_val = 'data_taxonomy_id_value'
+        assert arg == mock_val
+
+
+def test_create_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_data_taxonomy(
+            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_data_taxonomy(
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_taxonomy_id
+        mock_val = 'data_taxonomy_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_data_taxonomy(
+            gcd_data_taxonomy.CreateDataTaxonomyRequest(),
+            parent='parent_value',
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            data_taxonomy_id='data_taxonomy_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  gcd_data_taxonomy.UpdateDataTaxonomyRequest,
+  dict,
+])
+def test_update_data_taxonomy(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_taxonomy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest( + ) + +def test_update_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc + request = {} + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_taxonomy] = mock_rpc + + request = {} + await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_taxonomy_async_from_dict(): + await test_update_data_taxonomy_async(request_type=dict) + +def test_update_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + request.data_taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_taxonomy.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + request.data_taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_taxonomy.name=name_value', + ) in kw['metadata'] + + +def test_update_data_taxonomy_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_taxonomy( + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_taxonomy + mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_data_taxonomy_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_taxonomy( + gcd_data_taxonomy.UpdateDataTaxonomyRequest(), + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_data_taxonomy_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
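+    # (Update uses the data_taxonomy/update_mask pair rather than parent/id;
+    # the same request-packing checks as in the sync variant follow.)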
+    with mock.patch.object(
+            type(client.transport.update_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_data_taxonomy(
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_taxonomy
+        mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_data_taxonomy(
+            gcd_data_taxonomy.UpdateDataTaxonomyRequest(),
+            data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.DeleteDataTaxonomyRequest,
+  dict,
+])
+def test_delete_data_taxonomy(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.DeleteDataTaxonomyRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.DeleteDataTaxonomyRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_data_taxonomy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.DeleteDataTaxonomyRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_data_taxonomy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc + request = {} + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_taxonomy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_taxonomy] = mock_rpc + + request = {} + await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataTaxonomyRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataTaxonomyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_async_from_dict(): + await test_delete_data_taxonomy_async(request_type=dict) + +def test_delete_data_taxonomy_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_taxonomy_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_data_taxonomy_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_data_taxonomy(
+            data_taxonomy.DeleteDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_data_taxonomy(
+            data_taxonomy.DeleteDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.ListDataTaxonomiesRequest,
+  dict,
+])
+def test_list_data_taxonomies(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = data_taxonomy.ListDataTaxonomiesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.ListDataTaxonomiesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTaxonomiesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.ListDataTaxonomiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_data_taxonomies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.ListDataTaxonomiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_taxonomies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_taxonomies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc + request = {} + client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. 
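+        # Each list_data_taxonomies() call should hit the cached wrapped RPC
+        # exactly once; wrap_method itself only runs at client construction.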
+        assert mock_rpc.call_count == 1
+
+        client.list_data_taxonomies(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_taxonomies in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_taxonomies] = mock_rpc
+
+        request = {}
+        await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_taxonomies(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataTaxonomiesRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataTaxonomiesRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListDataTaxonomiesAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+        assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_async_from_dict():
+    await test_list_data_taxonomies_async(request_type=dict)
+
+def test_list_data_taxonomies_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataTaxonomiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
+        client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataTaxonomiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
+        await client.list_data_taxonomies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_taxonomies_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataTaxonomiesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_taxonomies(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_taxonomies_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_taxonomies(
+            data_taxonomy.ListDataTaxonomiesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_taxonomies_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_taxonomies),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.list_data_taxonomies( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_data_taxonomies_flattened_error_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_taxonomies( + data_taxonomy.ListDataTaxonomiesRequest(), + parent='parent_value', + ) + + +def test_list_data_taxonomies_pager(transport_name: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_data_taxonomies(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_taxonomy.DataTaxonomy) + for i in results) +def test_list_data_taxonomies_pages(transport_name: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + # Set the response to a series of pages. 
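+        # side_effect yields one canned response per stub invocation; the
+        # trailing RuntimeError would only surface if the pager requested a
+        # page past the final empty-token response.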
+ call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_taxonomies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_taxonomies_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[], + next_page_token='def', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataTaxonomiesResponse( + data_taxonomies=[ + data_taxonomy.DataTaxonomy(), + data_taxonomy.DataTaxonomy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_taxonomies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataTaxonomy) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_taxonomies_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
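+        # The async pages variant consumes the same four canned responses via
+        # "async for", so page boundaries should match the sync test above.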
+        call.side_effect = (
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                ],
+                next_page_token='abc',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[],
+                next_page_token='def',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                ],
+                next_page_token='ghi',
+            ),
+            data_taxonomy.ListDataTaxonomiesResponse(
+                data_taxonomies=[
+                    data_taxonomy.DataTaxonomy(),
+                    data_taxonomy.DataTaxonomy(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in ( # pragma: no branch
+            await client.list_data_taxonomies(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.GetDataTaxonomyRequest,
+  dict,
+])
+def test_get_data_taxonomy(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.DataTaxonomy(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            attribute_count=1628,
+            etag='etag_value',
+            class_count=1182,
+        )
+        response = client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.GetDataTaxonomyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataTaxonomy)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.attribute_count == 1628
+    assert response.etag == 'etag_value'
+    assert response.class_count == 1182
+
+
+def test_get_data_taxonomy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.GetDataTaxonomyRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
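+        # Invoking the RPC with the pre-populated request lets the assertion
+        # below verify that explicitly set, non-UUID4 fields reach the stub
+        # unchanged.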
+        client.get_data_taxonomy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == data_taxonomy.GetDataTaxonomyRequest(
+            name='name_value',
+        )
+
+def test_get_data_taxonomy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_data_taxonomy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc
+        request = {}
+        client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_data_taxonomy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_data_taxonomy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_data_taxonomy] = mock_rpc
+
+        request = {}
+        await client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_data_taxonomy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataTaxonomyRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
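+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
+        # awaitable call object, mimicking what a real async unary-unary
+        # stub returns.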
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            attribute_count=1628,
+            etag='etag_value',
+            class_count=1182,
+        ))
+        response = await client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.GetDataTaxonomyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataTaxonomy)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.attribute_count == 1628
+    assert response.etag == 'etag_value'
+    assert response.class_count == 1182
+
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_async_from_dict():
+    await test_get_data_taxonomy_async(request_type=dict)
+
+def test_get_data_taxonomy_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataTaxonomyRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        call.return_value = data_taxonomy.DataTaxonomy()
+        client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataTaxonomyRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
+        await client.get_data_taxonomy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_taxonomy_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.DataTaxonomy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_taxonomy_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_taxonomy(
+            data_taxonomy.GetDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_taxonomy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_taxonomy(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_taxonomy_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_taxonomy(
+            data_taxonomy.GetDataTaxonomyRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.CreateDataAttributeBindingRequest,
+  dict,
+])
+def test_create_data_attribute_binding(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.CreateDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_data_attribute_binding_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.CreateDataAttributeBindingRequest(
+        parent='parent_value',
+        data_attribute_binding_id='data_attribute_binding_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.create_data_attribute_binding(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == data_taxonomy.CreateDataAttributeBindingRequest(
+            parent='parent_value',
+            data_attribute_binding_id='data_attribute_binding_id_value',
+        )
+
+def test_create_data_attribute_binding_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc
+        request = {}
+        client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_data_attribute_binding in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_data_attribute_binding] = mock_rpc
+
+        request = {}
+        await client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.create_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeBindingRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.CreateDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_async_from_dict():
+    await test_create_data_attribute_binding_async(request_type=dict)
+
+def test_create_data_attribute_binding_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.CreateDataAttributeBindingRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.CreateDataAttributeBindingRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
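+    # The stub returns a raw operations_pb2.Operation for LRO methods; the
+    # client wraps it in a google.api_core future, which is why the unary
+    # tests above assert isinstance(response, future.Future).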
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_data_attribute_binding_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_data_attribute_binding(
+            parent='parent_value',
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            data_attribute_binding_id='data_attribute_binding_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_attribute_binding
+        mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_attribute_binding_id
+        mock_val = 'data_attribute_binding_id_value'
+        assert arg == mock_val
+
+
+def test_create_data_attribute_binding_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_data_attribute_binding(
+            data_taxonomy.CreateDataAttributeBindingRequest(),
+            parent='parent_value',
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            data_attribute_binding_id='data_attribute_binding_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_data_attribute_binding(
+            parent='parent_value',
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            data_attribute_binding_id='data_attribute_binding_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_attribute_binding
+        mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_attribute_binding_id
+        mock_val = 'data_attribute_binding_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_binding_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_data_attribute_binding(
+            data_taxonomy.CreateDataAttributeBindingRequest(),
+            parent='parent_value',
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            data_attribute_binding_id='data_attribute_binding_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.UpdateDataAttributeBindingRequest,
+  dict,
+])
+def test_update_data_attribute_binding(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.UpdateDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_data_attribute_binding_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.UpdateDataAttributeBindingRequest(
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.update_data_attribute_binding(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest(
+        )
+
+def test_update_data_attribute_binding_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc
+        request = {}
+        client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_data_attribute_binding in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_data_attribute_binding] = mock_rpc
+
+        request = {}
+        await client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.update_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeBindingRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.UpdateDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_async_from_dict():
+    await test_update_data_attribute_binding_async(request_type=dict)
+
+def test_update_data_attribute_binding_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.UpdateDataAttributeBindingRequest()
+
+    request.data_attribute_binding.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'data_attribute_binding.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.UpdateDataAttributeBindingRequest()
+
+    request.data_attribute_binding.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.update_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'data_attribute_binding.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_data_attribute_binding_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_data_attribute_binding(
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute_binding
+        mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_data_attribute_binding_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_data_attribute_binding(
+            data_taxonomy.UpdateDataAttributeBindingRequest(),
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_data_attribute_binding(
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute_binding
+        mock_val = data_taxonomy.DataAttributeBinding(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_binding_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_data_attribute_binding(
+            data_taxonomy.UpdateDataAttributeBindingRequest(),
+            data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.DeleteDataAttributeBindingRequest,
+  dict,
+])
+def test_delete_data_attribute_binding(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.DeleteDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.DeleteDataAttributeBindingRequest(
+        name='name_value',
+        etag='etag_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.delete_data_attribute_binding(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == data_taxonomy.DeleteDataAttributeBindingRequest(
+            name='name_value',
+            etag='etag_value',
+        )
+
+def test_delete_data_attribute_binding_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc
+        request = {}
+        client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.delete_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_data_attribute_binding in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute_binding] = mock_rpc
+
+        request = {}
+        await client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.delete_data_attribute_binding(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeBindingRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.DeleteDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_async_from_dict():
+    await test_delete_data_attribute_binding_async(request_type=dict)
+
+def test_delete_data_attribute_binding_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.DeleteDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.DeleteDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.delete_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
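+    # The transport derives the x-goog-request-params routing header from the
+    # request's resource field, so the header value mirrors the field path
+    # (name=name_value here, data_attribute_binding.name=... for updates).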
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_data_attribute_binding_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_data_attribute_binding_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_data_attribute_binding(
+            data_taxonomy.DeleteDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_binding_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_data_attribute_binding(
+            data_taxonomy.DeleteDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  data_taxonomy.ListDataAttributeBindingsRequest,
+  dict,
+])
+def test_list_data_attribute_bindings(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
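+    # Patching __call__ on type(client.transport.<rpc>) intercepts the bound
+    # stub: special-method lookup happens on the class, so the client's
+    # internal invocation is routed through the mock.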
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataAttributeBindingsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataAttributeBindingsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributeBindingsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.ListDataAttributeBindingsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.list_data_attribute_bindings(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == data_taxonomy.ListDataAttributeBindingsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+def test_list_data_attribute_bindings_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc
+        request = {}
+        client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_data_attribute_bindings(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_data_attribute_bindings in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_data_attribute_bindings] = mock_rpc
+
+        request = {}
+        await client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_data_attribute_bindings(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributeBindingsRequest):
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataAttributeBindingsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributeBindingsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_async_from_dict():
+    await test_list_data_attribute_bindings_async(request_type=dict)
+
+def test_list_data_attribute_bindings_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataAttributeBindingsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        call.return_value = data_taxonomy.ListDataAttributeBindingsResponse()
+        client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.ListDataAttributeBindingsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse())
+        await client.list_data_attribute_bindings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_data_attribute_bindings_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataAttributeBindingsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_data_attribute_bindings(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_data_attribute_bindings_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_data_attribute_bindings(
+            data_taxonomy.ListDataAttributeBindingsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_data_attribute_bindings(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_attribute_bindings_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_data_attribute_bindings(
+            data_taxonomy.ListDataAttributeBindingsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_data_attribute_bindings_pager(transport_name: str = "grpc"):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+                next_page_token='abc',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[],
+                next_page_token='def',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+                next_page_token='ghi',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_data_attribute_bindings(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, data_taxonomy.DataAttributeBinding)
+                   for i in results)
+
+
+def test_list_data_attribute_bindings_pages(transport_name: str = "grpc"):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attribute_bindings),
+            '__call__') as call:
+        # Set the response to a series of pages.
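+        # (Note: mock's side_effect yields one element per RPC, so each
+        # ListDataAttributeBindingsResponse below serves a single page; the
+        # trailing RuntimeError would surface if the pager over-fetched.)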
+ call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_attribute_bindings(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_attribute_bindings(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataAttributeBinding) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributeBindingsResponse( + data_attribute_bindings=[ + data_taxonomy.DataAttributeBinding(), + data_taxonomy.DataAttributeBinding(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_attribute_bindings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataAttributeBindingRequest, + dict, +]) +def test_get_data_attribute_binding(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.DataAttributeBinding( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + attributes=['attributes_value'], + resource='resource_value', + ) + response = client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.GetDataAttributeBindingRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_taxonomy.DataAttributeBinding) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.attributes == ['attributes_value'] + + +def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.GetDataAttributeBindingRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_attribute_binding(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.GetDataAttributeBindingRequest( + name='name_value', + ) + +def test_get_data_attribute_binding_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc + request = {} + client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_data_attribute_binding in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_data_attribute_binding] = mock_rpc + + request = {} + await client.get_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeBindingRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            attributes=['attributes_value'],
+        ))
+        response = await client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.GetDataAttributeBindingRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataAttributeBinding)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.etag == 'etag_value'
+    assert response.attributes == ['attributes_value']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_async_from_dict():
+    await test_get_data_attribute_binding_async(request_type=dict)
+
+def test_get_data_attribute_binding_field_headers():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = data_taxonomy.DataAttributeBinding()
+        client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = data_taxonomy.GetDataAttributeBindingRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
+        await client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_data_attribute_binding_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.DataAttributeBinding()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_data_attribute_binding_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_data_attribute_binding(
+            data_taxonomy.GetDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute_binding),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_data_attribute_binding(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_binding_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_data_attribute_binding(
+            data_taxonomy.GetDataAttributeBindingRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.CreateDataAttributeRequest,
+    dict,
+])
+def test_create_data_attribute(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_data_attribute(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.CreateDataAttributeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
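+    # (CreateDataAttribute is a long-running operation: the stub returns an
+    # operations_pb2.Operation, which the client wraps in an api_core
+    # operation future, hence the future.Future check below.)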
+ assert isinstance(response, future.Future) + + +def test_create_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.CreateDataAttributeRequest( + parent='parent_value', + data_attribute_id='data_attribute_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.CreateDataAttributeRequest( + parent='parent_value', + data_attribute_id='data_attribute_id_value', + ) + +def test_create_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc + request = {} + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_data_attribute] = mock_rpc + + request = {} + await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.CreateDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_attribute_async_from_dict(): + await test_create_data_attribute_async(request_type=dict) + +def test_create_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.CreateDataAttributeRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_data_attribute_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_attribute( + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].data_attribute + mock_val = data_taxonomy.DataAttribute(name='name_value') + assert arg == mock_val + arg = args[0].data_attribute_id + mock_val = 'data_attribute_id_value' + assert arg == mock_val + + +def test_create_data_attribute_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_attribute( + data_taxonomy.CreateDataAttributeRequest(), + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + +@pytest.mark.asyncio +async def test_create_data_attribute_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_data_attribute(
+            parent='parent_value',
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            data_attribute_id='data_attribute_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].data_attribute_id
+        mock_val = 'data_attribute_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_data_attribute(
+            data_taxonomy.CreateDataAttributeRequest(),
+            parent='parent_value',
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            data_attribute_id='data_attribute_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.UpdateDataAttributeRequest,
+    dict,
+])
+def test_update_data_attribute(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_data_attribute(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.UpdateDataAttributeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_data_attribute_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.UpdateDataAttributeRequest(
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.UpdateDataAttributeRequest( + ) + +def test_update_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc + request = {} + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_data_attribute] = mock_rpc + + request = {} + await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.UpdateDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_attribute_async_from_dict(): + await test_update_data_attribute_async(request_type=dict) + +def test_update_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeRequest() + + request.data_attribute.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'data_attribute.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.UpdateDataAttributeRequest() + + request.data_attribute.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
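+    # (kw['metadata'] is the sequence of (key, value) metadata pairs passed to
+    # the stub; for update RPCs the routing param is keyed on the resource
+    # path, i.e. 'data_attribute.name'.)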
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'data_attribute.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_data_attribute_flattened():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_data_attribute(
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_data_attribute_flattened_error():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_data_attribute(
+            data_taxonomy.UpdateDataAttributeRequest(),
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_flattened_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_data_attribute),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_data_attribute(
+            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].data_attribute
+        mock_val = data_taxonomy.DataAttribute(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_data_attribute( + data_taxonomy.UpdateDataAttributeRequest(), + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.DeleteDataAttributeRequest, + dict, +]) +def test_delete_data_attribute(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.DeleteDataAttributeRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.DeleteDataAttributeRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc + request = {} + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_data_attribute in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute] = mock_rpc + + request = {} + await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_taxonomy.DeleteDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_attribute_async_from_dict(): + await test_delete_data_attribute_async(request_type=dict) + +def test_delete_data_attribute_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_taxonomy.DeleteDataAttributeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_data_attribute_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_taxonomy.DeleteDataAttributeRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_data_attribute_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_attribute( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_data_attribute_flattened_error(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_attribute( + data_taxonomy.DeleteDataAttributeRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_data_attribute_flattened_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. 
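+        # (The async surface awaits the transport call, so the fake Operation
+        # is wrapped in grpc_helpers_async.FakeUnaryUnaryCall to be awaitable.)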
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_data_attribute(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_data_attribute_flattened_error_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_data_attribute(
+            data_taxonomy.DeleteDataAttributeRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataAttributesRequest,
+    dict,
+])
+def test_list_data_attributes(request_type, transport: str = 'grpc'):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_taxonomy.ListDataAttributesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_data_attributes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = data_taxonomy.ListDataAttributesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_data_attributes_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = data_taxonomy.ListDataAttributesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.list_data_attributes(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.ListDataAttributesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_data_attributes_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_attributes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc + request = {} + client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_attributes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_data_attributes in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_data_attributes] = mock_rpc + + request = {} + await client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_data_attributes_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributesRequest): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_data_attributes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = data_taxonomy.ListDataAttributesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListDataAttributesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_async_from_dict():
+ await test_list_data_attributes_async(request_type=dict)
+
+def test_list_data_attributes_field_headers():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.ListDataAttributesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attributes),
+ '__call__') as call:
+ call.return_value = data_taxonomy.ListDataAttributesResponse()
+ client.list_data_attributes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_field_headers_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.ListDataAttributesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attributes),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse())
+ await client.list_data_attributes(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_data_attributes_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attributes),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = data_taxonomy.ListDataAttributesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_data_attributes(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_data_attributes_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_data_attributes(
+ data_taxonomy.ListDataAttributesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attributes),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_data_attributes(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_data_attributes_flattened_error_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_data_attributes(
+ data_taxonomy.ListDataAttributesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_data_attributes_pager(transport_name: str = "grpc"):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_data_attributes),
+ '__call__') as call:
+ # Set the response to a series of pages.
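+ # The side_effect below fakes four pages of results: the tokens
+ # 'abc', 'def' and 'ghi' chain the pages together, the final
+ # response's empty token ends iteration, and the trailing
+ # RuntimeError fails the test if the pager ever requests a page
+ # beyond the last one.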
+ call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_data_attributes(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_taxonomy.DataAttribute) + for i in results) +def test_list_data_attributes_pages(transport_name: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_attributes(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_data_attributes_async_pager(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
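+ # Same four-page sequence as the synchronous pager tests above; the
+ # async variant awaits the call once to obtain the pager and then
+ # drains it with `async for`.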
+ call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_attributes(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_taxonomy.DataAttribute) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_attributes_async_pages(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + next_page_token='abc', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[], + next_page_token='def', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + ], + next_page_token='ghi', + ), + data_taxonomy.ListDataAttributesResponse( + data_attributes=[ + data_taxonomy.DataAttribute(), + data_taxonomy.DataAttribute(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_attributes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataAttributeRequest, + dict, +]) +def test_get_data_attribute(request_type, transport: str = 'grpc'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = data_taxonomy.DataAttribute( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + parent_id='parent_id_value', + attribute_count=1628, + etag='etag_value', + ) + response = client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
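+ # Each entry in mock_calls is a (name, args, kwargs) triple, so the
+ # request sent to the stub is recovered below as the first positional
+ # argument.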
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_taxonomy.GetDataAttributeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_taxonomy.DataAttribute) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.parent_id == 'parent_id_value' + assert response.attribute_count == 1628 + assert response.etag == 'etag_value' + + +def test_get_data_attribute_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_taxonomy.GetDataAttributeRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_data_attribute(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_taxonomy.GetDataAttributeRequest( + name='name_value', + ) + +def test_get_data_attribute_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc + request = {} + client.get_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_data_attribute(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_data_attribute in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_data_attribute] = mock_rpc
+
+ request = {}
+ await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_data_attribute(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeRequest):
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ parent_id='parent_id_value',
+ attribute_count=1628,
+ etag='etag_value',
+ ))
+ response = await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = data_taxonomy.GetDataAttributeRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, data_taxonomy.DataAttribute)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.parent_id == 'parent_id_value'
+ assert response.attribute_count == 1628
+ assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_async_from_dict():
+ await test_get_data_attribute_async(request_type=dict)
+
+def test_get_data_attribute_field_headers():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.GetDataAttributeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ call.return_value = data_taxonomy.DataAttribute()
+ client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_field_headers_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = data_taxonomy.GetDataAttributeRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
+ await client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_data_attribute_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = data_taxonomy.DataAttribute()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_data_attribute(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_data_attribute_flattened_error():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_data_attribute(
+ data_taxonomy.GetDataAttributeRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_data_attribute_flattened_async():
+ client = DataTaxonomyServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_data_attribute),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_data_attribute( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_data_attribute_flattened_error_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_attribute( + data_taxonomy.GetDataAttributeRequest(), + name='name_value', + ) + + +def test_create_data_taxonomy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc + + request = {} + client.create_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_taxonomy_rest_required_fields(request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_taxonomy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "dataTaxonomyId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_taxonomy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataTaxonomyId" in jsonified_request + assert jsonified_request["dataTaxonomyId"] == request_init["data_taxonomy_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["dataTaxonomyId"] = 'data_taxonomy_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_taxonomy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
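+ # Anything reported as unset here can only be a query parameter;
+ # path parameters such as "parent" travel in the URI and must never
+ # appear in this set.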
+ assert not set(unset_fields) - set(("data_taxonomy_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "dataTaxonomyId" in jsonified_request + assert jsonified_request["dataTaxonomyId"] == 'data_taxonomy_id_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_taxonomy(request) + + expected_params = [ + ( + "dataTaxonomyId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_data_taxonomy_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_data_taxonomy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("dataTaxonomyId", "validateOnly", )) & set(("parent", "dataTaxonomyId", "dataTaxonomy", ))) + + +def test_create_data_taxonomy_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
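+ # create_data_taxonomy is a long-running method, so the faked REST
+ # response is a bare longrunning Operation; a name alone is enough
+ # for the client to wrap it.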
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + data_taxonomy_id='data_taxonomy_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_taxonomy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataTaxonomies" % client.transport._host, args[1]) + + +def test_create_data_taxonomy_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_taxonomy( + gcd_data_taxonomy.CreateDataTaxonomyRequest(), + parent='parent_value', + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + data_taxonomy_id='data_taxonomy_id_value', + ) + + +def test_update_data_taxonomy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc + + request = {} + client.update_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_taxonomy_rest_required_fields(request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_taxonomy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_taxonomy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_taxonomy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_data_taxonomy_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_data_taxonomy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataTaxonomy", ))) + + +def test_update_data_taxonomy_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_taxonomy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{data_taxonomy.name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) + + +def test_update_data_taxonomy_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_taxonomy( + gcd_data_taxonomy.UpdateDataTaxonomyRequest(), + data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_data_taxonomy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc + + request = {} + client.delete_data_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_taxonomy_rest_required_fields(request_type=data_taxonomy.DeleteDataTaxonomyRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_taxonomy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_taxonomy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
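+ # transcode() is what maps a request onto the http rule's URI, verb
+ # and query string, so stubbing it lets the test inject a fixed
+ # mapping without supplying real field values.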
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_taxonomy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_data_taxonomy_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_data_taxonomy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_data_taxonomy_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_taxonomy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) + + +def test_delete_data_taxonomy_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_taxonomy( + data_taxonomy.DeleteDataTaxonomyRequest(), + name='name_value', + ) + + +def test_list_data_taxonomies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_taxonomies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc + + request = {} + client.list_data_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_taxonomies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_taxonomies_rest_required_fields(request_type=data_taxonomy.ListDataTaxonomiesRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_taxonomies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_taxonomies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.ListDataTaxonomiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_data_taxonomies(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_data_taxonomies_rest_unset_required_fields():
+ transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+ unset_fields = transport.list_data_taxonomies._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_data_taxonomies_rest_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = data_taxonomy.ListDataTaxonomiesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_data_taxonomies(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataTaxonomies" % client.transport._host, args[1])
+
+
+def test_list_data_taxonomies_rest_flattened_error(transport: str = 'rest'):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_data_taxonomies(
+ data_taxonomy.ListDataTaxonomiesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_data_taxonomies_rest_pager(transport: str = 'rest'):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ data_taxonomy.ListDataTaxonomiesResponse(
+ data_taxonomies=[
+ data_taxonomy.DataTaxonomy(),
+ data_taxonomy.DataTaxonomy(),
+ data_taxonomy.DataTaxonomy(),
+ ],
+ next_page_token='abc',
+ ),
+ data_taxonomy.ListDataTaxonomiesResponse(
+ data_taxonomies=[],
+ next_page_token='def',
+ ),
+ data_taxonomy.ListDataTaxonomiesResponse(
+ data_taxonomies=[
+ data_taxonomy.DataTaxonomy(),
+ ],
+ next_page_token='ghi',
+ ),
+ data_taxonomy.ListDataTaxonomiesResponse(
+ data_taxonomies=[
+ data_taxonomy.DataTaxonomy(),
+ data_taxonomy.DataTaxonomy(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(data_taxonomy.ListDataTaxonomiesResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ pager = client.list_data_taxonomies(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, data_taxonomy.DataTaxonomy)
+ for i in results)
+
+ pages = list(client.list_data_taxonomies(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_get_data_taxonomy_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_data_taxonomy in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc
+
+ request = {}
+ client.get_data_taxonomy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.get_data_taxonomy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_taxonomy_rest_required_fields(request_type=data_taxonomy.GetDataTaxonomyRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_taxonomy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_taxonomy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.DataTaxonomy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_taxonomy.DataTaxonomy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_taxonomy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_data_taxonomy_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_data_taxonomy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_data_taxonomy_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_taxonomy.DataTaxonomy() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_taxonomy.DataTaxonomy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_taxonomy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) + + +def test_get_data_taxonomy_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_taxonomy( + data_taxonomy.GetDataTaxonomyRequest(), + name='name_value', + ) + + +def test_create_data_attribute_binding_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc + + request = {} + client.create_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.CreateDataAttributeBindingRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_attribute_binding_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "dataAttributeBindingId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute_binding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataAttributeBindingId" in jsonified_request + assert jsonified_request["dataAttributeBindingId"] == request_init["data_attribute_binding_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["dataAttributeBindingId"] = 'data_attribute_binding_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute_binding._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("data_attribute_binding_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "dataAttributeBindingId" in jsonified_request + assert jsonified_request["dataAttributeBindingId"] == 'data_attribute_binding_id_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
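+            # path_template.transcode() normally maps the request message onto
+            # the matching google.api.http rule and returns a dict carrying
+            # the URI, the HTTP verb, the query parameters and, for POST and
+            # PATCH, a body. The stub below fakes that result with a
+            # placeholder URI so the REST code path can run without real
+            # http_options.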
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_attribute_binding(request) + + expected_params = [ + ( + "dataAttributeBindingId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_data_attribute_binding_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_data_attribute_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(("dataAttributeBindingId", "validateOnly", )) & set(("parent", "dataAttributeBindingId", "dataAttributeBinding", ))) + + +def test_create_data_attribute_binding_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + data_attribute_binding_id='data_attribute_binding_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_attribute_binding(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataAttributeBindings" % client.transport._host, args[1]) + + +def test_create_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
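+    # A client method accepts either a fully-formed request object or
+    # flattened keyword arguments, never both in one call. Illustrative
+    # sketches of the two valid forms (argument values are placeholders):
+    #
+    #     client.create_data_attribute_binding(request=request_obj)
+    #     client.create_data_attribute_binding(parent=..., data_attribute_binding=..., data_attribute_binding_id=...)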
+ with pytest.raises(ValueError): + client.create_data_attribute_binding( + data_taxonomy.CreateDataAttributeBindingRequest(), + parent='parent_value', + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + data_attribute_binding_id='data_attribute_binding_id_value', + ) + + +def test_update_data_attribute_binding_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc + + request = {} + client.update_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.UpdateDataAttributeBindingRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute_binding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute_binding._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
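+        # Update RPCs transcode to PATCH: the resource itself travels in the
+        # request body while updateMask and validateOnly are expected to ride
+        # along as query parameters, which is why the unset-fields checks
+        # above allow exactly ("update_mask", "validate_only").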
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_attribute_binding(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_data_attribute_binding_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_data_attribute_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataAttributeBinding", ))) + + +def test_update_data_attribute_binding_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_attribute_binding(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{data_attribute_binding.name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1]) + + +def test_update_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_attribute_binding( + data_taxonomy.UpdateDataAttributeBindingRequest(), + data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_data_attribute_binding_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc + + request = {} + client.delete_data_attribute_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.DeleteDataAttributeBindingRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["etag"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "etag" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute_binding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "etag" in jsonified_request + assert jsonified_request["etag"] == request_init["etag"] + + jsonified_request["name"] = 'name_value' + jsonified_request["etag"] = 'etag_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute_binding._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "etag" in jsonified_request + assert jsonified_request["etag"] == 'etag_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
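+    # delete_data_attribute_binding is a long-running operation, so the REST
+    # layer should hand back an Operation proto rather than an empty message;
+    # a minimal Operation carrying only a name is enough for this test.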
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_attribute_binding(request) + + expected_params = [ + ( + "etag", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_data_attribute_binding_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_data_attribute_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", "etag", ))) + + +def test_delete_data_attribute_binding_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_attribute_binding(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1]) + + +def test_delete_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_attribute_binding( + data_taxonomy.DeleteDataAttributeBindingRequest(), + name='name_value', + ) + + +def test_list_data_attribute_bindings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc + + request = {} + client.list_data_attribute_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_attribute_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_attribute_bindings_rest_required_fields(request_type=data_taxonomy.ListDataAttributeBindingsRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attribute_bindings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attribute_bindings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.ListDataAttributeBindingsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
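+            # List methods transcode to a plain GET with no body, so every
+            # populated field would surface as a query parameter. The request
+            # here was built entirely from proto3 defaults, which drop out of
+            # the serialized JSON, leaving only the transport-level '$alt'
+            # flag to assert on below.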
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_data_attribute_bindings(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_data_attribute_bindings_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_data_attribute_bindings._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_data_attribute_bindings_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.ListDataAttributeBindingsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_data_attribute_bindings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataAttributeBindings" % client.transport._host, args[1]) + + +def test_list_data_attribute_bindings_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_attribute_bindings( + data_taxonomy.ListDataAttributeBindingsRequest(), + parent='parent_value', + ) + + +def test_list_data_attribute_bindings_rest_pager(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
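+    # Pagination is driven purely by next_page_token, so the pager can be
+    # faked by queueing canned HTTP responses on req.side_effect: pages of
+    # 3, 0, 1 and 2 items (6 results total), duplicated so that both the
+    # item iterator and the .pages iterator below each consume a full sweep.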
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+                next_page_token='abc',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[],
+                next_page_token='def',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+                next_page_token='ghi',
+            ),
+            data_taxonomy.ListDataAttributeBindingsResponse(
+                data_attribute_bindings=[
+                    data_taxonomy.DataAttributeBinding(),
+                    data_taxonomy.DataAttributeBinding(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(data_taxonomy.ListDataAttributeBindingsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        pager = client.list_data_attribute_bindings(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, data_taxonomy.DataAttributeBinding)
+                   for i in results)
+
+        pages = list(client.list_data_attribute_bindings(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_data_attribute_binding_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataTaxonomyServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc
+
+        request = {}
+        client.get_data_attribute_binding(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.get_data_attribute_binding(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.GetDataAttributeBindingRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute_binding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute_binding._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.DataAttributeBinding() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_taxonomy.DataAttributeBinding.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_attribute_binding(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_data_attribute_binding_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_data_attribute_binding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_data_attribute_binding_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
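+    # Unlike the required-fields test above, which patches requests.Session
+    # directly, this patches the transport's own session class (google-auth's
+    # AuthorizedSession, a requests.Session subclass); either way the HTTP
+    # call is intercepted before any network traffic happens.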
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.DataAttributeBinding() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_taxonomy.DataAttributeBinding.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_attribute_binding(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1]) + + +def test_get_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_attribute_binding( + data_taxonomy.GetDataAttributeBindingRequest(), + name='name_value', + ) + + +def test_create_data_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc + + request = {} + client.create_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. 
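+        # The plain dict passed above is accepted because the GAPIC layer
+        # coerces mappings into the request message type before invoking the
+        # (mocked) wrapped RPC, so an empty {} is the cheapest valid request.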
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_attribute_rest_required_fields(request_type=data_taxonomy.CreateDataAttributeRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_attribute_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "dataAttributeId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataAttributeId" in jsonified_request + assert jsonified_request["dataAttributeId"] == request_init["data_attribute_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["dataAttributeId"] = 'data_attribute_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("data_attribute_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "dataAttributeId" in jsonified_request + assert jsonified_request["dataAttributeId"] == 'data_attribute_id_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
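+            # Earlier in this test dataAttributeId dropped out of the JSON
+            # (proto3 string defaults do not serialize) and was reinstated by
+            # _get_unset_required_fields; that round trip is why the
+            # expected_params list further down includes ("dataAttributeId", "")
+            # next to the '$alt' flag.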
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_attribute(request) + + expected_params = [ + ( + "dataAttributeId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_data_attribute_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_data_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(("dataAttributeId", "validateOnly", )) & set(("parent", "dataAttributeId", "dataAttribute", ))) + + +def test_create_data_attribute_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes" % client.transport._host, args[1]) + + +def test_create_data_attribute_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_attribute( + data_taxonomy.CreateDataAttributeRequest(), + parent='parent_value', + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + data_attribute_id='data_attribute_id_value', + ) + + +def test_update_data_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc + + request = {} + client.update_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_attribute_rest_required_fields(request_type=data_taxonomy.UpdateDataAttributeRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_attribute(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_data_attribute_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_data_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataAttribute", ))) + + +def test_update_data_attribute_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{data_attribute.name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1]) + + +def test_update_data_attribute_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_attribute( + data_taxonomy.UpdateDataAttributeRequest(), + data_attribute=data_taxonomy.DataAttribute(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_data_attribute_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_attribute in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc + + request = {} + client.delete_data_attribute(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_data_attribute(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_attribute_rest_required_fields(request_type=data_taxonomy.DeleteDataAttributeRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
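+        # Conceptually, _get_unset_required_fields reports
+        #
+        #     default-valued query parameters & required fields
+        #
+        # For this RPC etag is optional and name lives in the URI path, so the
+        # dedicated unset-fields test below expects the empty intersection
+        # set(("etag",)) & set(("name",)).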
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_attribute(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_data_attribute_rest_unset_required_fields(): + transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_data_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_data_attribute_rest_flattened(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1]) + + +def test_delete_data_attribute_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_attribute( + data_taxonomy.DeleteDataAttributeRequest(), + name='name_value', + ) + + +def test_list_data_attributes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_attributes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc + + request = {} + client.list_data_attributes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_attributes_rest_required_fields(request_type=data_taxonomy.ListDataAttributesRequest): + transport_class = transports.DataTaxonomyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.ListDataAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_data_attributes(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_data_attributes_rest_unset_required_fields():
+ transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_data_attributes._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_data_attributes_rest_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = data_taxonomy.ListDataAttributesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_data_attributes(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes" % client.transport._host, args[1])
+
+
+def test_list_data_attributes_rest_flattened_error(transport: str = 'rest'):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_data_attributes(
+ data_taxonomy.ListDataAttributesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_data_attributes_rest_pager(transport: str = 'rest'):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ data_taxonomy.ListDataAttributesResponse(
+ data_attributes=[
+ data_taxonomy.DataAttribute(),
+ data_taxonomy.DataAttribute(),
+ data_taxonomy.DataAttribute(),
+ ],
+ next_page_token='abc',
+ ),
+ data_taxonomy.ListDataAttributesResponse(
+ data_attributes=[],
+ next_page_token='def',
+ ),
+ data_taxonomy.ListDataAttributesResponse(
+ data_attributes=[
+ data_taxonomy.DataAttribute(),
+ ],
+ next_page_token='ghi',
+ ),
+ data_taxonomy.ListDataAttributesResponse(
+ data_attributes=[
+ data_taxonomy.DataAttribute(),
+ data_taxonomy.DataAttribute(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(data_taxonomy.ListDataAttributesResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+
+ pager = client.list_data_attributes(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, data_taxonomy.DataAttribute)
+ for i in results)
+
+ pages = list(client.list_data_attributes(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_get_data_attribute_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_data_attribute in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc
+
+ request = {}
+ client.get_data_attribute(request)
+
+ # Establish that the underlying gRPC stub method was called.
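+ # The second call below must reuse the cached wrapper, so wrap_method is not
+ # invoked again and only the call count on the mocked RPC increases.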
+ assert mock_rpc.call_count == 1
+
+ client.get_data_attribute(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+def test_get_data_attribute_rest_required_fields(request_type=data_taxonomy.GetDataAttributeRequest):
+ transport_class = transports.DataTaxonomyServiceRestTransport
+
+ request_init = {}
+ request_init["name"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(json_format.MessageToJson(
+ pb_request,
+ use_integers_for_enums=False
+ ))
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ jsonified_request["name"] = 'name_value'
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "name" in jsonified_request
+ assert jsonified_request["name"] == 'name_value'
+
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = data_taxonomy.DataAttribute()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = data_taxonomy.DataAttribute.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.get_data_attribute(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_get_data_attribute_rest_unset_required_fields():
+ transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.get_data_attribute._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_data_attribute_rest_flattened():
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.DataAttribute() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_taxonomy.DataAttribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_attribute(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1]) + + +def test_get_data_attribute_rest_flattened_error(transport: str = 'rest'): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_attribute( + data_taxonomy.GetDataAttributeRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTaxonomyServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataTaxonomyServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTaxonomyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataTaxonomyServiceGrpcTransport, + transports.DataTaxonomyServiceGrpcAsyncIOTransport, + transports.DataTaxonomyServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataTaxonomyServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
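+ # A request=None call should be coerced by the client into an empty
+ # data_taxonomy.DeleteDataTaxonomyRequest before it reaches the stub.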
+ with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_taxonomies_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + call.return_value = data_taxonomy.ListDataTaxonomiesResponse() + client.list_data_taxonomies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataTaxonomiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_taxonomy_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + call.return_value = data_taxonomy.DataTaxonomy() + client.get_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_attribute_bindings_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() + client.list_data_attribute_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributeBindingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_attribute_binding_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + call.return_value = data_taxonomy.DataAttributeBinding() + client.get_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_attributes_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + call.return_value = data_taxonomy.ListDataAttributesResponse() + client.list_data_attributes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_attribute_empty_call_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + call.return_value = data_taxonomy.DataAttribute() + client.get_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataTaxonomyServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
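+# The async variants below mirror the sync empty-call tests above; each stub is
+# faked with grpc_helpers_async.FakeUnaryUnaryCall so the awaited call resolves
+# immediately.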
+@pytest.mark.asyncio +async def test_create_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_taxonomies_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_taxonomies(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataTaxonomiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_taxonomy_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + attribute_count=1628, + etag='etag_value', + class_count=1182, + )) + await client.get_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_attribute_bindings_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_attribute_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributeBindingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_attribute_binding_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + etag='etag_value', + attributes=['attributes_value'], + )) + await client.get_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_data_attribute(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_attributes_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attributes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_data_attributes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_attribute_empty_call_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_data_attribute), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + parent_id='parent_id_value', + attribute_count=1628, + etag='etag_value', + )) + await client.get_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataTaxonomyServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_data_taxonomy_rest_bad_request(request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_taxonomy(request) + + +@pytest.mark.parametrize("request_type", [ + gcd_data_taxonomy.CreateDataTaxonomyRequest, + dict, +]) +def test_create_data_taxonomy_rest_call_success(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["data_taxonomy"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'attribute_count': 1628, 'etag': 'etag_value', 'class_count': 1182} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcd_data_taxonomy.CreateDataTaxonomyRequest.meta.fields["data_taxonomy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
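+ # Proto-plus message classes expose their fields via `meta.fields`, while raw
+ # protobuf classes expose a DESCRIPTOR; the hasattr check below tells them apart.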
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_taxonomy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_taxonomy"][field])): + del request_init["data_taxonomy"][field][i][subfield] + else: + del request_init["data_taxonomy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_data_taxonomy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_data_taxonomy_rest_interceptors(null_interceptor):
+ transport = transports.DataTaxonomyServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+ )
+ client = DataTaxonomyServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_taxonomy") as post, \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_taxonomy_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_taxonomy") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gcd_data_taxonomy.CreateDataTaxonomyRequest.pb(gcd_data_taxonomy.CreateDataTaxonomyRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = gcd_data_taxonomy.CreateDataTaxonomyRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.create_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_data_taxonomy_rest_bad_request(request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
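+ # (A 400 status with an empty JSON body is enough for the transport to raise
+ # core_exceptions.BadRequest; no response payload needs to be faked.)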
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_data_taxonomy(request) + + +@pytest.mark.parametrize("request_type", [ + gcd_data_taxonomy.UpdateDataTaxonomyRequest, + dict, +]) +def test_update_data_taxonomy_rest_call_success(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}} + request_init["data_taxonomy"] = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'attribute_count': 1628, 'etag': 'etag_value', 'class_count': 1182} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcd_data_taxonomy.UpdateDataTaxonomyRequest.meta.fields["data_taxonomy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_taxonomy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_taxonomy"][field])): + del request_init["data_taxonomy"][field][i][subfield] + else: + del request_init["data_taxonomy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_taxonomy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+ assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_data_taxonomy_rest_interceptors(null_interceptor):
+ transport = transports.DataTaxonomyServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+ )
+ client = DataTaxonomyServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_taxonomy") as post, \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_taxonomy_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_taxonomy") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gcd_data_taxonomy.UpdateDataTaxonomyRequest.pb(gcd_data_taxonomy.UpdateDataTaxonomyRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = gcd_data_taxonomy.UpdateDataTaxonomyRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_data_taxonomy_rest_bad_request(request_type=data_taxonomy.DeleteDataTaxonomyRequest):
+ client = DataTaxonomyServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_data_taxonomy(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.DeleteDataTaxonomyRequest,
+    dict,
+])
+def test_delete_data_taxonomy_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_data_taxonomy(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_data_taxonomy_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_taxonomy") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_taxonomy_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_taxonomy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.DeleteDataTaxonomyRequest.pb(data_taxonomy.DeleteDataTaxonomyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.DeleteDataTaxonomyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_data_taxonomies_rest_bad_request(request_type=data_taxonomy.ListDataTaxonomiesRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_data_taxonomies(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataTaxonomiesRequest,
+    dict,
+])
+def test_list_data_taxonomies_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = data_taxonomy.ListDataTaxonomiesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_data_taxonomies(request)
+
+    # Establish that the response is the type that we expect.
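+    # The client wraps the raw list response in a pager; attribute access on the
+    # pager is forwarded to the underlying ListDataTaxonomiesResponse, which is
+    # why next_page_token and unreachable_locations can be asserted directly.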
+    assert isinstance(response, pagers.ListDataTaxonomiesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_data_taxonomies_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_taxonomies") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_taxonomies_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_taxonomies") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.ListDataTaxonomiesRequest.pb(data_taxonomy.ListDataTaxonomiesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = data_taxonomy.ListDataTaxonomiesResponse.to_json(data_taxonomy.ListDataTaxonomiesResponse())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.ListDataTaxonomiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = data_taxonomy.ListDataTaxonomiesResponse()
+        post_with_metadata.return_value = data_taxonomy.ListDataTaxonomiesResponse(), metadata
+
+        client.list_data_taxonomies(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_data_taxonomy_rest_bad_request(request_type=data_taxonomy.GetDataTaxonomyRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_data_taxonomy(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.GetDataTaxonomyRequest,
+    dict,
+])
+def test_get_data_taxonomy_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = data_taxonomy.DataTaxonomy(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            attribute_count=1628,
+            etag='etag_value',
+            class_count=1182,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = data_taxonomy.DataTaxonomy.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_data_taxonomy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, data_taxonomy.DataTaxonomy)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.attribute_count == 1628
+    assert response.etag == 'etag_value'
+    assert response.class_count == 1182
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_data_taxonomy_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_taxonomy") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_taxonomy_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_taxonomy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.GetDataTaxonomyRequest.pb(data_taxonomy.GetDataTaxonomyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = data_taxonomy.DataTaxonomy.to_json(data_taxonomy.DataTaxonomy())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.GetDataTaxonomyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = data_taxonomy.DataTaxonomy()
+        post_with_metadata.return_value = data_taxonomy.DataTaxonomy(), metadata
+
+        client.get_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.CreateDataAttributeBindingRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_data_attribute_binding(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.CreateDataAttributeBindingRequest,
+    dict,
+])
+def test_create_data_attribute_binding_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["data_attribute_binding"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'resource': 'resource_value', 'attributes': ['attributes_value1', 'attributes_value2'], 'paths': [{'name': 'name_value', 'attributes': ['attributes_value1', 'attributes_value2']}]}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = data_taxonomy.CreateDataAttributeBindingRequest.meta.fields["data_attribute_binding"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
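+        # proto-plus message classes generally do not expose a protobuf
+        # DESCRIPTOR attribute (raw *_pb2 classes do), which is what the
+        # hasattr() check below uses to pick the right field enumeration.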
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else:  # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["data_attribute_binding"].items():  # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["data_attribute_binding"][field])):
+                    del request_init["data_attribute_binding"][field][i][subfield]
+            else:
+                del request_init["data_attribute_binding"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_data_attribute_binding(request)
+
+    # Establish that the response is the type that we expect.
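+    # Long-running methods return a google.api_core.operation.Operation future;
+    # its .operation property holds the raw operations_pb2.Operation, which the
+    # assertion below checks against the mocked payload.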
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_data_attribute_binding_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_binding") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_binding_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_attribute_binding") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.CreateDataAttributeBindingRequest.pb(data_taxonomy.CreateDataAttributeBindingRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.CreateDataAttributeBindingRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.UpdateDataAttributeBindingRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_data_attribute_binding(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.UpdateDataAttributeBindingRequest,
+    dict,
+])
+def test_update_data_attribute_binding_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}}
+    request_init["data_attribute_binding"] = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'resource': 'resource_value', 'attributes': ['attributes_value1', 'attributes_value2'], 'paths': [{'name': 'name_value', 'attributes': ['attributes_value1', 'attributes_value2']}]}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = data_taxonomy.UpdateDataAttributeBindingRequest.meta.fields["data_attribute_binding"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else:  # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["data_attribute_binding"].items():  # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["data_attribute_binding"][field])):
+                    del request_init["data_attribute_binding"][field][i][subfield]
+            else:
+                del request_init["data_attribute_binding"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_data_attribute_binding(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_data_attribute_binding_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_binding") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_binding_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_attribute_binding") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.UpdateDataAttributeBindingRequest.pb(data_taxonomy.UpdateDataAttributeBindingRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.UpdateDataAttributeBindingRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.DeleteDataAttributeBindingRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_data_attribute_binding(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.DeleteDataAttributeBindingRequest,
+    dict,
+])
+def test_delete_data_attribute_binding_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_data_attribute_binding(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_data_attribute_binding_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_binding") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_binding_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_attribute_binding") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.DeleteDataAttributeBindingRequest.pb(data_taxonomy.DeleteDataAttributeBindingRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.DeleteDataAttributeBindingRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_data_attribute_bindings_rest_bad_request(request_type=data_taxonomy.ListDataAttributeBindingsRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_data_attribute_bindings(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataAttributeBindingsRequest,
+    dict,
+])
+def test_list_data_attribute_bindings_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = data_taxonomy.ListDataAttributeBindingsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_data_attribute_bindings(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDataAttributeBindingsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_data_attribute_bindings_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attribute_bindings") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attribute_bindings_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_attribute_bindings") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.ListDataAttributeBindingsRequest.pb(data_taxonomy.ListDataAttributeBindingsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = data_taxonomy.ListDataAttributeBindingsResponse.to_json(data_taxonomy.ListDataAttributeBindingsResponse())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.ListDataAttributeBindingsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = data_taxonomy.ListDataAttributeBindingsResponse()
+        post_with_metadata.return_value = data_taxonomy.ListDataAttributeBindingsResponse(), metadata
+
+        client.list_data_attribute_bindings(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.GetDataAttributeBindingRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_data_attribute_binding(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.GetDataAttributeBindingRequest,
+    dict,
+])
+def test_get_data_attribute_binding_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = data_taxonomy.DataAttributeBinding(
+            name='name_value',
+            uid='uid_value',
+            description='description_value',
+            display_name='display_name_value',
+            etag='etag_value',
+            attributes=['attributes_value'],
+            resource='resource_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = data_taxonomy.DataAttributeBinding.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_data_attribute_binding(request)
+
+    # Establish that the response is the type that we expect.
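+    # Repeated scalar fields on proto-plus messages compare equal to plain
+    # Python lists, so attributes can be asserted against a list literal.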
+ assert isinstance(response, data_taxonomy.DataAttributeBinding) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.etag == 'etag_value' + assert response.attributes == ['attributes_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_attribute_binding_rest_interceptors(null_interceptor): + transport = transports.DataTaxonomyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), + ) + client = DataTaxonomyServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_binding") as post, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_binding_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_attribute_binding") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_taxonomy.GetDataAttributeBindingRequest.pb(data_taxonomy.GetDataAttributeBindingRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_taxonomy.DataAttributeBinding.to_json(data_taxonomy.DataAttributeBinding()) + req.return_value.content = return_value + + request = data_taxonomy.GetDataAttributeBindingRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_taxonomy.DataAttributeBinding() + post_with_metadata.return_value = data_taxonomy.DataAttributeBinding(), metadata + + client.get_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_data_attribute_rest_bad_request(request_type=data_taxonomy.CreateDataAttributeRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_attribute(request) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.CreateDataAttributeRequest, + dict, +]) +def test_create_data_attribute_rest_call_success(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} + request_init["data_attribute"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'parent_id': 'parent_id_value', 'attribute_count': 1628, 'etag': 'etag_value', 'resource_access_spec': {'readers': ['readers_value1', 'readers_value2'], 'writers': ['writers_value1', 'writers_value2'], 'owners': ['owners_value1', 'owners_value2']}, 'data_access_spec': {'readers': ['readers_value1', 'readers_value2']}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_taxonomy.CreateDataAttributeRequest.meta.fields["data_attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_attribute"][field])): + del request_init["data_attribute"][field][i][subfield] + else: + del request_init["data_attribute"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_data_attribute(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_attribute_rest_interceptors(null_interceptor): + transport = transports.DataTaxonomyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), + ) + client = DataTaxonomyServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute") as post, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_attribute") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_taxonomy.CreateDataAttributeRequest.pb(data_taxonomy.CreateDataAttributeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = data_taxonomy.CreateDataAttributeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_data_attribute_rest_bad_request(request_type=data_taxonomy.UpdateDataAttributeRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_data_attribute(request) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.UpdateDataAttributeRequest, + dict, +]) +def test_update_data_attribute_rest_call_success(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}} + request_init["data_attribute"] = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'parent_id': 'parent_id_value', 'attribute_count': 1628, 'etag': 'etag_value', 'resource_access_spec': {'readers': ['readers_value1', 'readers_value2'], 'writers': ['writers_value1', 'writers_value2'], 'owners': ['owners_value1', 'owners_value2']}, 'data_access_spec': {'readers': ['readers_value1', 'readers_value2']}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_taxonomy.UpdateDataAttributeRequest.meta.fields["data_attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_attribute"][field])): + del request_init["data_attribute"][field][i][subfield] + else: + del request_init["data_attribute"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_attribute(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_attribute_rest_interceptors(null_interceptor): + transport = transports.DataTaxonomyServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), + ) + client = DataTaxonomyServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute") as post, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_attribute") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_taxonomy.UpdateDataAttributeRequest.pb(data_taxonomy.UpdateDataAttributeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = data_taxonomy.UpdateDataAttributeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_data_attribute_rest_bad_request(request_type=data_taxonomy.DeleteDataAttributeRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_data_attribute(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.DeleteDataAttributeRequest,
+    dict,
+])
+def test_delete_data_attribute_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_data_attribute(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_data_attribute_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_attribute") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.DeleteDataAttributeRequest.pb(data_taxonomy.DeleteDataAttributeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.DeleteDataAttributeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_data_attributes_rest_bad_request(request_type=data_taxonomy.ListDataAttributesRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_data_attributes(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    data_taxonomy.ListDataAttributesRequest,
+    dict,
+])
+def test_list_data_attributes_rest_call_success(request_type):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = data_taxonomy.ListDataAttributesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_data_attributes(request)
+
+    # Establish that the response is the type that we expect.
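+    # The call returns a pager, which proxies field access to the underlying ListDataAttributesResponse.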
+    assert isinstance(response, pagers.ListDataAttributesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_data_attributes_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attributes") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attributes_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_attributes") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.ListDataAttributesRequest.pb(data_taxonomy.ListDataAttributesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = data_taxonomy.ListDataAttributesResponse.to_json(data_taxonomy.ListDataAttributesResponse())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.ListDataAttributesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = data_taxonomy.ListDataAttributesResponse()
+        post_with_metadata.return_value = data_taxonomy.ListDataAttributesResponse(), metadata
+
+        client.list_data_attributes(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_data_attribute_rest_bad_request(request_type=data_taxonomy.GetDataAttributeRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_attribute(request) + + +@pytest.mark.parametrize("request_type", [ + data_taxonomy.GetDataAttributeRequest, + dict, +]) +def test_get_data_attribute_rest_call_success(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = data_taxonomy.DataAttribute( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + parent_id='parent_id_value', + attribute_count=1628, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_taxonomy.DataAttribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_attribute(request) + + # Establish that the response is the type that we expect. 
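+    # Each field set on the fake response should round-trip through the JSON transcoding unchanged.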
+    assert isinstance(response, data_taxonomy.DataAttribute)
+    assert response.name == 'name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.display_name == 'display_name_value'
+    assert response.parent_id == 'parent_id_value'
+    assert response.attribute_count == 1628
+    assert response.etag == 'etag_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_data_attribute_rest_interceptors(null_interceptor):
+    transport = transports.DataTaxonomyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(),
+    )
+    client = DataTaxonomyServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute") as post, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_attribute") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = data_taxonomy.GetDataAttributeRequest.pb(data_taxonomy.GetDataAttributeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = data_taxonomy.DataAttribute.to_json(data_taxonomy.DataAttribute())
+        req.return_value.content = return_value
+
+        request = data_taxonomy.GetDataAttributeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = data_taxonomy.DataAttribute()
+        post_with_metadata.return_value = data_taxonomy.DataAttribute(), metadata
+
+        client.get_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
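+        # DeleteOperation returns google.protobuf.Empty, which the client surfaces as None.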
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_taxonomy_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_taxonomy), + '__call__') as call: + client.create_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_taxonomy_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_taxonomy), + '__call__') as call: + client.update_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
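+# In that case the client should fall back to a default-constructed request, e.g.
+# client.delete_data_taxonomy() behaving like
+# client.delete_data_taxonomy(request=data_taxonomy.DeleteDataTaxonomyRequest()).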
+def test_delete_data_taxonomy_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_taxonomy), + '__call__') as call: + client.delete_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_taxonomies_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_taxonomies), + '__call__') as call: + client.list_data_taxonomies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataTaxonomiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_taxonomy_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_taxonomy), + '__call__') as call: + client.get_data_taxonomy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataTaxonomyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_binding_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute_binding), + '__call__') as call: + client.create_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_attribute_binding_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute_binding), + '__call__') as call: + client.update_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_data_attribute_binding_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_attribute_binding), + '__call__') as call: + client.delete_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_attribute_bindings_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_attribute_bindings), + '__call__') as call: + client.list_data_attribute_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.ListDataAttributeBindingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_attribute_binding_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_attribute_binding), + '__call__') as call: + client.get_data_attribute_binding(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.GetDataAttributeBindingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_attribute_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_attribute), + '__call__') as call: + client.create_data_attribute(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_taxonomy.CreateDataAttributeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_attribute_empty_call_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_attribute), + '__call__') as call: + client.update_data_attribute(request=None) + + # Establish that the underlying stub method was called. 
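+    # mock_calls[0] unpacks to (name, args, kwargs); args[0] is the request proto.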
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = data_taxonomy.UpdateDataAttributeRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_data_attribute_empty_call_rest():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_data_attribute),
+            '__call__') as call:
+        client.delete_data_attribute(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = data_taxonomy.DeleteDataAttributeRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_data_attributes_empty_call_rest():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_data_attributes),
+            '__call__') as call:
+        client.list_data_attributes(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = data_taxonomy.ListDataAttributesRequest()
+
+    assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_data_attribute_empty_call_rest():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_data_attribute),
+            '__call__') as call:
+        client.get_data_attribute(request=None)
+
+    # Establish that the underlying stub method was called.
+    call.assert_called()
+    _, args, _ = call.mock_calls[0]
+    request_msg = data_taxonomy.GetDataAttributeRequest()
+
+    assert args[0] == request_msg
+
+
+def test_data_taxonomy_service_rest_lro_client():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.DataTaxonomyServiceGrpcTransport,
+    )
+
+
+def test_data_taxonomy_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.DataTaxonomyServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_data_taxonomy_service_base_transport():
+    # Instantiate the base transport.
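+    # __init__ is patched to a no-op so the abstract base class can be constructed directly.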
+    with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.DataTaxonomyServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'create_data_taxonomy',
+        'update_data_taxonomy',
+        'delete_data_taxonomy',
+        'list_data_taxonomies',
+        'get_data_taxonomy',
+        'create_data_attribute_binding',
+        'update_data_attribute_binding',
+        'delete_data_attribute_binding',
+        'list_data_attribute_bindings',
+        'get_data_attribute_binding',
+        'create_data_attribute',
+        'update_data_attribute',
+        'delete_data_attribute',
+        'list_data_attributes',
+        'get_data_attribute',
+        'get_location',
+        'list_locations',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_data_taxonomy_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataTaxonomyServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_data_taxonomy_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataTaxonomyServiceTransport()
+        adc.assert_called_once()
+
+
+def test_data_taxonomy_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
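+    # google.auth.default is the ADC entry point; patching it lets the test assert the scopes requested.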
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DataTaxonomyServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataTaxonomyServiceGrpcTransport,
+        transports.DataTaxonomyServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_taxonomy_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataTaxonomyServiceGrpcTransport,
+        transports.DataTaxonomyServiceGrpcAsyncIOTransport,
+        transports.DataTaxonomyServiceRestTransport,
+    ],
+)
+def test_data_taxonomy_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataTaxonomyServiceGrpcTransport, grpc_helpers),
+        (transports.DataTaxonomyServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_data_taxonomy_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport])
+def test_data_taxonomy_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
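+    # Explicitly supplied channel credentials should take precedence over any mTLS cert source.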
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_data_taxonomy_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.DataTaxonomyServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_data_taxonomy_service_host_no_port(transport_name):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_data_taxonomy_service_host_with_port(transport_name):
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com:8000'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_data_taxonomy_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DataTaxonomyServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DataTaxonomyServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_data_taxonomy._session
+    session2 = client2.transport.create_data_taxonomy._session
+    assert session1 != session2
+    session1 = client1.transport.update_data_taxonomy._session
+    session2 = client2.transport.update_data_taxonomy._session
+    assert session1 != session2
+    session1 = client1.transport.delete_data_taxonomy._session
+    session2 = client2.transport.delete_data_taxonomy._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_taxonomies._session
+    session2 = client2.transport.list_data_taxonomies._session
+    assert session1 != session2
+    session1 = client1.transport.get_data_taxonomy._session
+    session2 = client2.transport.get_data_taxonomy._session
+    assert session1 != session2
+    session1 = client1.transport.create_data_attribute_binding._session
+    session2 = client2.transport.create_data_attribute_binding._session
+    assert session1 != session2
+    session1 = client1.transport.update_data_attribute_binding._session
+    session2 = client2.transport.update_data_attribute_binding._session
+    assert session1 != session2
+    session1 = client1.transport.delete_data_attribute_binding._session
+    session2 = client2.transport.delete_data_attribute_binding._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_attribute_bindings._session
+    session2 = client2.transport.list_data_attribute_bindings._session
+    assert session1 != session2
+    session1 = client1.transport.get_data_attribute_binding._session
+    session2 = client2.transport.get_data_attribute_binding._session
+    assert session1 != session2
+    session1 = client1.transport.create_data_attribute._session
+    session2 = client2.transport.create_data_attribute._session
+    assert session1 != session2
+    session1 = client1.transport.update_data_attribute._session
+    session2 = client2.transport.update_data_attribute._session
+    assert session1 != session2
+    session1 = client1.transport.delete_data_attribute._session
+    session2 = client2.transport.delete_data_attribute._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_attributes._session
+    session2 = client2.transport.list_data_attributes._session
+    assert session1 != session2
+    session1 = client1.transport.get_data_attribute._session
+    session2 = client2.transport.get_data_attribute._session
+    assert session1 != session2
+
+
+def test_data_taxonomy_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataTaxonomyServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_taxonomy_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
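+# Until then, the deprecated arguments must keep producing a working mTLS channel.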
+@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport])
+def test_data_taxonomy_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport])
+def test_data_taxonomy_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_data_taxonomy_service_grpc_lro_client():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
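+    # The identity check below proves the operations client is cached, not rebuilt per access.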
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_taxonomy_service_grpc_lro_async_client():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_attribute_path():
+    project = "squid"
+    location = "clam"
+    dataTaxonomy = "whelk"
+    data_attribute_id = "octopus"
+    expected = "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, )
+    actual = DataTaxonomyServiceClient.data_attribute_path(project, location, dataTaxonomy, data_attribute_id)
+    assert expected == actual
+
+
+def test_parse_data_attribute_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "dataTaxonomy": "cuttlefish",
+        "data_attribute_id": "mussel",
+    }
+    path = DataTaxonomyServiceClient.data_attribute_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataTaxonomyServiceClient.parse_data_attribute_path(path)
+    assert expected == actual
+
+
+def test_data_attribute_binding_path():
+    project = "winkle"
+    location = "nautilus"
+    data_attribute_binding_id = "scallop"
+    expected = "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, )
+    actual = DataTaxonomyServiceClient.data_attribute_binding_path(project, location, data_attribute_binding_id)
+    assert expected == actual
+
+
+def test_parse_data_attribute_binding_path():
+    expected = {
+        "project": "abalone",
+        "location": "squid",
+        "data_attribute_binding_id": "clam",
+    }
+    path = DataTaxonomyServiceClient.data_attribute_binding_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataTaxonomyServiceClient.parse_data_attribute_binding_path(path)
+    assert expected == actual
+
+
+def test_data_taxonomy_path():
+    project = "whelk"
+    location = "octopus"
+    data_taxonomy_id = "oyster"
+    expected = "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, )
+    actual = DataTaxonomyServiceClient.data_taxonomy_path(project, location, data_taxonomy_id)
+    assert expected == actual
+
+
+def test_parse_data_taxonomy_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "data_taxonomy_id": "mussel",
+    }
+    path = DataTaxonomyServiceClient.data_taxonomy_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DataTaxonomyServiceClient.parse_data_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataTaxonomyServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DataTaxonomyServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataTaxonomyServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DataTaxonomyServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataTaxonomyServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DataTaxonomyServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DataTaxonomyServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DataTaxonomyServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataTaxonomyServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataTaxonomyServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DataTaxonomyServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
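+    # parse_* helpers are expected to exactly invert their *_path counterparts.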
+ actual = DataTaxonomyServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataTaxonomyServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
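+    # Routing information travels in the x-goog-request-params metadata entry.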
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = DataTaxonomyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch get_location (not list_locations) so the call under test actually
+    # hits the mocked stub method.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataTaxonomyServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch get_location (not list_locations), matching the method invoked below.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DataTaxonomyServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DataTaxonomyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport), + (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py new file mode 100644 index 000000000000..aade4dcd62f1 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -0,0 +1,28263 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError:  # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async  # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceAsyncClient
+from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceClient
+from google.cloud.dataplex_v1.services.dataplex_service import pagers
+from google.cloud.dataplex_v1.services.dataplex_service import transports
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import resources
+from google.cloud.dataplex_v1.types import service
+from google.cloud.dataplex_v1.types import tasks
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    # Step by chunk_size so successive chunks do not overlap.
+    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of
google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataplexServiceClient._get_default_mtls_endpoint(None) is None + assert DataplexServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataplexServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataplexServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DataplexServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataplexServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataplexServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataplexServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, 
{"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataplexServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataplexServiceClient._get_client_cert_source(None, False) is None + assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DataplexServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DataplexServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT + assert DataplexServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DataplexServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DataplexServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DataplexServiceClient._get_universe_domain(None, None) == DataplexServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DataplexServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataplexServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataplexServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataplexServiceClient, "grpc"), + (DataplexServiceAsyncClient, "grpc_asyncio"), + (DataplexServiceClient, "rest"), +]) +def test_dataplex_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataplexServiceGrpcTransport, "grpc"), + (transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataplexServiceRestTransport, "rest"), +]) +def test_dataplex_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataplexServiceClient, "grpc"), + (DataplexServiceAsyncClient, "grpc_asyncio"), + (DataplexServiceClient, "rest"), +]) +def test_dataplex_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_dataplex_service_client_get_transport_class(): + transport = DataplexServiceClient.get_transport_class() + available_transports = [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceRestTransport, + ] + assert transport in available_transports + + transport = DataplexServiceClient.get_transport_class("grpc") + assert transport == transports.DataplexServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest"), +]) +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test_dataplex_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "true"),
+    (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "false"),
+    (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", "true"),
+    (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient))
+@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE",
modify_default_endpoint_template(DataplexServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dataplex_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataplexServiceClient, DataplexServiceAsyncClient +]) +@mock.patch.object(DataplexServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceAsyncClient)) +def test_dataplex_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DataplexServiceClient, DataplexServiceAsyncClient +]) +@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) +@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) +def test_dataplex_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataplexServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    else:
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"),
+    (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest"),
+])
+def test_dataplex_service_client_client_options_scopes(client_class, transport_class, transport_name):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers),
+    (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", None),
+])
+def test_dataplex_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_dataplex_service_client_client_options_from_dict():
+    with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceGrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = DataplexServiceClient(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers),
+    (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_dataplex_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
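+    # (create_channel below should receive the credentials loaded from the
+    # file rather than the ADC credentials, which are also mocked out.)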
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataplex.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=None,
+            default_host="dataplex.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.CreateLakeRequest,
+    dict,
+])
+def test_create_lake(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.CreateLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_lake_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.CreateLakeRequest(
+        parent='parent_value',
+        lake_id='lake_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
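+        # The call below should echo back every explicitly-set non-UUID4
+        # field; a request_id-style UUID4 field, if present, would be filled
+        # in automatically and is deliberately left out of the comparison.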
+        client.create_lake(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.CreateLakeRequest(
+            parent='parent_value',
+            lake_id='lake_id_value',
+        )
+
+def test_create_lake_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_lake in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc
+        request = {}
+        client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_lake in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_lake] = mock_rpc
+
+        request = {}
+        await client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.create_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_lake_async(transport: str = 'grpc_asyncio', request_type=service.CreateLakeRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
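+    # (grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response so that
+    # awaiting the mocked stub call yields it, mimicking a real async stub.)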
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.CreateLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_lake_async_from_dict():
+    await test_create_lake_async(request_type=dict)
+
+def test_create_lake_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.CreateLakeRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_lake_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.CreateLakeRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_lake_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_lake(
+            parent='parent_value',
+            lake=resources.Lake(name='name_value'),
+            lake_id='lake_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
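+        # (each flattened keyword argument should have been copied onto the
+        # corresponding CreateLakeRequest field)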
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].lake
+        mock_val = resources.Lake(name='name_value')
+        assert arg == mock_val
+        arg = args[0].lake_id
+        mock_val = 'lake_id_value'
+        assert arg == mock_val
+
+
+def test_create_lake_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_lake(
+            service.CreateLakeRequest(),
+            parent='parent_value',
+            lake=resources.Lake(name='name_value'),
+            lake_id='lake_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_lake_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_lake(
+            parent='parent_value',
+            lake=resources.Lake(name='name_value'),
+            lake_id='lake_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].lake
+        mock_val = resources.Lake(name='name_value')
+        assert arg == mock_val
+        arg = args[0].lake_id
+        mock_val = 'lake_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_lake_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_lake(
+            service.CreateLakeRequest(),
+            parent='parent_value',
+            lake=resources.Lake(name='name_value'),
+            lake_id='lake_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.UpdateLakeRequest,
+    dict,
+])
+def test_update_lake(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.UpdateLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_lake_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.UpdateLakeRequest(
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.update_lake(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.UpdateLakeRequest(
+        )
+
+def test_update_lake_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_lake in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc
+        request = {}
+        client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_lake in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_lake] = mock_rpc
+
+        request = {}
+        await client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.update_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_lake_async(transport: str = 'grpc_asyncio', request_type=service.UpdateLakeRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.UpdateLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_update_lake_async_from_dict():
+    await test_update_lake_async(request_type=dict)
+
+def test_update_lake_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.UpdateLakeRequest()
+
+    request.lake.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'lake.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_lake_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.UpdateLakeRequest()
+
+    request.lake.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.update_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
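+    # (routing headers travel in the gRPC metadata as a single
+    # 'x-goog-request-params' key/value pair)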
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'lake.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_lake_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_lake(
+            lake=resources.Lake(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].lake
+        mock_val = resources.Lake(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_lake_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_lake(
+            service.UpdateLakeRequest(),
+            lake=resources.Lake(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_lake_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_lake(
+            lake=resources.Lake(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].lake
+        mock_val = resources.Lake(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_lake_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_lake(
+            service.UpdateLakeRequest(),
+            lake=resources.Lake(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.DeleteLakeRequest,
+    dict,
+])
+def test_delete_lake(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.DeleteLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_lake_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.DeleteLakeRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.delete_lake(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteLakeRequest(
+            name='name_value',
+        )
+
+def test_delete_lake_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_lake in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc
+        request = {}
+        client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.delete_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_lake in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_lake] = mock_rpc
+
+        request = {}
+        await client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.delete_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_lake_async(transport: str = 'grpc_asyncio', request_type=service.DeleteLakeRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.DeleteLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_delete_lake_async_from_dict():
+    await test_delete_lake_async(request_type=dict)
+
+def test_delete_lake_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteLakeRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_lake_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.DeleteLakeRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.delete_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_lake_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_lake(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_lake_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_lake(
+            service.DeleteLakeRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_lake_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_lake(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_lake_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_lake(
+            service.DeleteLakeRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListLakesRequest,
+    dict,
+])
+def test_list_lakes(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListLakesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+        response = client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.ListLakesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLakesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+def test_list_lakes_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.ListLakesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        filter='filter_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_lakes(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListLakesRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            filter='filter_value',
+            order_by='order_by_value',
+        )
+
+def test_list_lakes_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_lakes in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc
+        request = {}
+        client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_lakes(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_lakes in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_lakes] = mock_rpc
+
+        request = {}
+        await client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_lakes(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lakes_async(transport: str = 'grpc_asyncio', request_type=service.ListLakesRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        ))
+        response = await client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListLakesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLakesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.asyncio
+async def test_list_lakes_async_from_dict():
+    await test_list_lakes_async(request_type=dict)
+
+def test_list_lakes_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListLakesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        call.return_value = service.ListLakesResponse()
+        client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_lakes_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListLakesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
+        await client.list_lakes(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_lakes_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListLakesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_lakes(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_lakes_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
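+    # (the client refuses to merge flattened keyword arguments into a
+    # caller-supplied request object, hence the ValueError)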
+    with pytest.raises(ValueError):
+        client.list_lakes(
+            service.ListLakesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_lakes_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_lakes(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_lakes_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_lakes(
+            service.ListLakesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_lakes_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListLakesResponse(
+                lakes=[],
+                next_page_token='def',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_lakes(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Lake)
+                   for i in results)
+
+def test_list_lakes_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__') as call:
+        # Set the response to a series of pages.
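+        # (four pages of 3, 0, 1 and 2 lakes; the empty token on the last
+        # page ends iteration, so the trailing RuntimeError is never raised)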
+        call.side_effect = (
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListLakesResponse(
+                lakes=[],
+                next_page_token='def',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_lakes(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_lakes_async_pager():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListLakesResponse(
+                lakes=[],
+                next_page_token='def',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_lakes(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, resources.Lake)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_lakes_async_pages():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lakes),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListLakesResponse(
+                lakes=[],
+                next_page_token='def',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListLakesResponse(
+                lakes=[
+                    resources.Lake(),
+                    resources.Lake(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_lakes(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+    service.GetLakeRequest,
+    dict,
+])
+def test_get_lake(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
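+    # (GetLake is a plain unary RPC, so the mock returns the Lake resource
+    # directly instead of a long-running operation)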
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Lake(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            service_account='service_account_value',
+        )
+        response = client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.GetLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Lake)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.service_account == 'service_account_value'
+
+
+def test_get_lake_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = service.GetLakeRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_lake(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.GetLakeRequest(
+            name='name_value',
+        )
+
+def test_get_lake_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_lake in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc
+        request = {}
+        client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
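+        # (get_lake is not an LRO method, so no operations client is built
+        # and no additional wrappers should be created by the second call)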
+        assert mock_rpc.call_count == 1
+
+        client.get_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_lake in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_lake] = mock_rpc
+
+        request = {}
+        await client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_lake(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_lake_async(transport: str = 'grpc_asyncio', request_type=service.GetLakeRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            service_account='service_account_value',
+        ))
+        response = await client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetLakeRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Lake)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.service_account == 'service_account_value'
+
+
+@pytest.mark.asyncio
+async def test_get_lake_async_from_dict():
+    await test_get_lake_async(request_type=dict)
+
+def test_get_lake_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetLakeRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
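+    # The client is expected to mirror URI-bound fields into request metadata,
+    # e.g. ('x-goog-request-params', 'name=name_value'), which is what the
+    # assertions below look for.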
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        call.return_value = resources.Lake()
+        client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_lake_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetLakeRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
+        await client.get_lake(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_lake_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Lake()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_lake(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_lake_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_lake(
+            service.GetLakeRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_lake_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_lake),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_lake(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
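+        # The flattened keyword argument should have been coerced into a single
+        # GetLakeRequest, so the checks below inspect args[0] of the recorded call.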
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_lake_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_lake( + service.GetLakeRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListLakeActionsRequest, + dict, +]) +def test_list_lake_actions(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListLakeActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakeActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_lake_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListLakeActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
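+        # Send the pre-populated request: only UUID4-eligible fields may be
+        # auto-populated, so the values set above must arrive unchanged.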
+        client.list_lake_actions(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.ListLakeActionsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+def test_list_lake_actions_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_lake_actions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc
+        request = {}
+        client.list_lake_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_lake_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_lake_actions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_lake_actions] = mock_rpc
+
+        request = {}
+        await client.list_lake_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_lake_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListLakeActionsRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lake_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_lake_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListLakeActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLakeActionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_lake_actions_async_from_dict(): + await test_list_lake_actions_async(request_type=dict) + +def test_list_lake_actions_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakeActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_lake_actions_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListLakeActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) + await client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_lake_actions_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_lake_actions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_lake_actions_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
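+    # (The client cannot tell which of the two values should win.)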
+    with pytest.raises(ValueError):
+        client.list_lake_actions(
+            service.ListLakeActionsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lake_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_lake_actions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_lake_actions_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_lake_actions(
+            service.ListLakeActionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_lake_actions_pager(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lake_actions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                    resources.Action(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListActionsResponse(
+                actions=[],
+                next_page_token='def',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_lake_actions(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Action)
+                   for i in results)
+def test_list_lake_actions_pages(transport_name: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_lake_actions),
+            '__call__') as call:
+        # Set the response to a series of pages.
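+        # Each invocation of the mocked stub pops the next item off side_effect;
+        # the trailing RuntimeError would surface if the pager over-fetched.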
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_lake_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_lake_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_lake_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_lake_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_lake_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateZoneRequest, + dict, +]) +def test_create_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
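+    # create_zone is a long-running operation, so the mocked Operation proto
+    # below stands in for the server's initial LRO response.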
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateZoneRequest( + parent='parent_value', + zone_id='zone_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateZoneRequest( + parent='parent_value', + zone_id='zone_id_value', + ) + +def test_create_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_zone] = mock_rpc + + request = {} + await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_zone_async(transport: str = 'grpc_asyncio', request_type=service.CreateZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_zone_async_from_dict(): + await test_create_zone_async(request_type=dict) + +def test_create_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateZoneRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_zone_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.CreateZoneRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_zone),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.create_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_zone_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_zone(
+            parent='parent_value',
+            zone=resources.Zone(name='name_value'),
+            zone_id='zone_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = resources.Zone(name='name_value')
+        assert arg == mock_val
+        arg = args[0].zone_id
+        mock_val = 'zone_id_value'
+        assert arg == mock_val
+
+
+def test_create_zone_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_zone(
+            service.CreateZoneRequest(),
+            parent='parent_value',
+            zone=resources.Zone(name='name_value'),
+            zone_id='zone_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_zone_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
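+        # All three flattened fields (parent, zone, zone_id) should be merged
+        # into a single CreateZoneRequest.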
+ response = await client.create_zone( + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].zone + mock_val = resources.Zone(name='name_value') + assert arg == mock_val + arg = args[0].zone_id + mock_val = 'zone_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_zone_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_zone( + service.CreateZoneRequest(), + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateZoneRequest, + dict, +]) +def test_update_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateZoneRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateZoneRequest( + ) + +def test_update_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_zone] = mock_rpc + + request = {} + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_zone_async(transport: str = 'grpc_asyncio', request_type=service.UpdateZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. 
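+        # FakeUnaryUnaryCall wraps the message so the mocked response can be
+        # awaited like a real unary-unary gRPC call.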
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_zone_async_from_dict(): + await test_update_zone_async(request_type=dict) + +def test_update_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'zone.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_zone_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateZoneRequest() + + request.zone.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'zone.name=name_value', + ) in kw['metadata'] + + +def test_update_zone_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_zone( + zone=resources.Zone(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].zone
+        mock_val = resources.Zone(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_zone_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_zone(
+            service.UpdateZoneRequest(),
+            zone=resources.Zone(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_zone_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_zone(
+            zone=resources.Zone(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].zone
+        mock_val = resources.Zone(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_zone_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_zone(
+            service.UpdateZoneRequest(),
+            zone=resources.Zone(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.DeleteZoneRequest,
+    dict,
+])
+def test_delete_zone(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.delete_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = service.DeleteZoneRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
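+    # LRO methods return an api_core operation future; calling result() on it
+    # would poll the (mocked) Operation to completion.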
+ assert isinstance(response, future.Future) + + +def test_delete_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteZoneRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteZoneRequest( + name='name_value', + ) + +def test_delete_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_zone] = mock_rpc + + request = {} + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_zone_async(transport: str = 'grpc_asyncio', request_type=service.DeleteZoneRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_zone_async_from_dict(): + await test_delete_zone_async(request_type=dict) + +def test_delete_zone_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_zone_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteZoneRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_zone_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_zone_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_zone(
+            service.DeleteZoneRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_zone_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_zone_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_zone(
+            service.DeleteZoneRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListZonesRequest,
+    dict,
+])
+def test_list_zones(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListZonesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZonesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZonesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_zones_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZonesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_zones(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZonesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_zones_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_zones(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_zones in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_zones] = mock_rpc
+
+        request = {}
+        await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_zones(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zones_async(transport: str = 'grpc_asyncio', request_type=service.ListZonesRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListZonesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZonesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_zones_async_from_dict():
+    await test_list_zones_async(request_type=dict)
+
+def test_list_zones_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZonesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        call.return_value = service.ListZonesResponse()
+        client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
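+    # Routing parameters travel as the 'x-goog-request-params' gRPC metadata
+    # entry, serialized as URL-encoded key=value pairs derived from the
+    # request fields.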
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_zones_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZonesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse())
+        await client.list_zones(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_zones_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListZonesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_zones(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_zones_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_zones(
+            service.ListZonesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_zones_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zones),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_zones(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_zones_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_zones( + service.ListZonesRequest(), + parent='parent_value', + ) + + +def test_list_zones_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_zones(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Zone) + for i in results) +def test_list_zones_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zones(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_zones_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
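+        # mock's side_effect yields one item per stub invocation, so each
+        # page fetch sees the next response in order; the trailing
+        # RuntimeError would surface if the pager ever requested a page past
+        # the final one (an empty next_page_token marks the last page).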
+ call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zones(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Zone) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_zones_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + resources.Zone(), + ], + next_page_token='abc', + ), + service.ListZonesResponse( + zones=[], + next_page_token='def', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + ], + next_page_token='ghi', + ), + service.ListZonesResponse( + zones=[ + resources.Zone(), + resources.Zone(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zones(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetZoneRequest, + dict, +]) +def test_get_zone(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Zone( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + type_=resources.Zone.Type.RAW, + ) + response = client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetZoneRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
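+    # proto-plus appends a trailing underscore to field names that would
+    # collide with reserved Python names, which is why the proto field
+    # `type` surfaces here as `type_`.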
+ assert isinstance(response, resources.Zone) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + assert response.type_ == resources.Zone.Type.RAW + + +def test_get_zone_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetZoneRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_zone(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetZoneRequest( + name='name_value', + ) + +def test_get_zone_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc + request = {} + client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_zone in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_zone] = mock_rpc + + request = {} + await client.get_zone(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.get_zone(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_zone_async(transport: str = 'grpc_asyncio', request_type=service.GetZoneRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            type_=resources.Zone.Type.RAW,
+        ))
+        response = await client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.GetZoneRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Zone)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.type_ == resources.Zone.Type.RAW
+
+
+@pytest.mark.asyncio
+async def test_get_zone_async_from_dict():
+    await test_get_zone_async(request_type=dict)
+
+def test_get_zone_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetZoneRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        call.return_value = resources.Zone()
+        client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_zone_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.GetZoneRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone())
+        await client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_zone_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.Zone()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_zone_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_zone(
+            service.GetZoneRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_zone_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_zone),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_zone(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_zone_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_zone(
+            service.GetZoneRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListZoneActionsRequest,
+    dict,
+])
+def test_list_zone_actions(request_type, transport: str = 'grpc'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListZoneActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListZoneActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_zone_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListZoneActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_zone_actions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListZoneActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_zone_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zone_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc + request = {} + client.list_zone_actions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_zone_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DataplexServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_zone_actions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_zone_actions] = mock_rpc
+
+        request = {}
+        await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_zone_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListZoneActionsRequest):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = service.ListZoneActionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZoneActionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_async_from_dict():
+    await test_list_zone_actions_async(request_type=dict)
+
+def test_list_zone_actions_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZoneActionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        call.return_value = service.ListActionsResponse()
+        client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListZoneActionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+        await client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_zone_actions_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListActionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_zone_actions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_zone_actions_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_zone_actions(
+            service.ListZoneActionsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_zone_actions_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_zone_actions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_zone_actions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_zone_actions_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_zone_actions( + service.ListZoneActionsRequest(), + parent='parent_value', + ) + + +def test_list_zone_actions_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_zone_actions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Action) + for i in results) +def test_list_zone_actions_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_zone_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_zone_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_zone_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_zone_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_zone_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateAssetRequest, + dict, +]) +def test_create_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
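+    # (Auto-population applies only to fields left unset; the explicitly
+    # set string fields below must reach the stub unchanged.)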
+ client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateAssetRequest( + parent='parent_value', + asset_id='asset_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateAssetRequest( + parent='parent_value', + asset_id='asset_id_value', + ) + +def test_create_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc + request = {} + client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_asset] = mock_rpc + + request = {} + await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_asset_async(transport: str = 'grpc_asyncio', request_type=service.CreateAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_asset_async_from_dict(): + await test_create_asset_async(request_type=dict) + +def test_create_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateAssetRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_asset_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateAssetRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_asset_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_asset(
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].asset
+        mock_val = resources.Asset(name='name_value')
+        assert arg == mock_val
+        arg = args[0].asset_id
+        mock_val = 'asset_id_value'
+        assert arg == mock_val
+
+
+def test_create_asset_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_asset(
+            service.CreateAssetRequest(),
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_asset_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_asset(
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].asset
+        mock_val = resources.Asset(name='name_value')
+        assert arg == mock_val
+        arg = args[0].asset_id
+        mock_val = 'asset_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_asset_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_asset( + service.CreateAssetRequest(), + parent='parent_value', + asset=resources.Asset(name='name_value'), + asset_id='asset_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateAssetRequest, + dict, +]) +def test_update_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateAssetRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateAssetRequest( + ) + +def test_update_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc + request = {} + client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_asset] = mock_rpc + + request = {} + await client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_asset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_asset_async_from_dict(): + await test_update_asset_async(request_type=dict) + +def test_update_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateAssetRequest() + + request.asset.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_asset(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'asset.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_asset_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.UpdateAssetRequest()
+
+    request.asset.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_asset),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+        await client.update_asset(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'asset.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_asset_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_asset(
+            asset=resources.Asset(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].asset
+        mock_val = resources.Asset(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_asset_flattened_error():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_asset(
+            service.UpdateAssetRequest(),
+            asset=resources.Asset(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_asset_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_asset),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
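+        # The client assembles these keyword arguments into an
+        # UpdateAssetRequest; supplying both a request object and flattened
+        # kwargs is rejected with ValueError (see the error tests below).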
+ response = await client.update_asset( + asset=resources.Asset(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].asset + mock_val = resources.Asset(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_asset_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_asset( + service.UpdateAssetRequest(), + asset=resources.Asset(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteAssetRequest, + dict, +]) +def test_delete_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteAssetRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
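+ # Editor's note: the "foo" sentinel above only keeps the mocked return value duck-typed; the shared test template also serves compute-style clients whose LRO plumbing reads `operation.name` and requires a string there. Nothing in this test asserts on the value itself.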
+ client.delete_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteAssetRequest( + name='name_value', + ) + +def test_delete_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc + request = {} + client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_asset] = mock_rpc + + request = {} + await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_asset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_asset_async_from_dict(): + await test_delete_asset_async(request_type=dict) + +def test_delete_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_asset_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_asset_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_asset( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_asset_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_asset( + service.DeleteAssetRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_asset_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_asset( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_asset_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_asset( + service.DeleteAssetRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListAssetsRequest, + dict, +]) +def test_list_assets(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListAssetsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_assets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
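+ # Editor's note: under AIP-4235, string fields annotated as auto-populated request IDs are filled with a random UUID4 by the client library when left unset; this test therefore sets only the non-UUID fields and checks that the request seen by the stub still equals the expected proto, e.g. service.ListAssetsRequest(parent='parent_value', ...).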
+ request = service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc + request = {} + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc + + request = {} + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
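+ # Editor's note (assumption about google.api_core internals): FakeUnaryUnaryCall below is a grpc_helpers_async test double that mimics a grpc.aio unary-unary call; awaiting it yields the wrapped response, so the async client can `await` the mocked stub just like a real channel call.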
+ with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListAssetsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_assets_async_from_dict(): + await test_list_assets_async(request_type=dict) + +def test_list_assets_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = service.ListAssetsResponse() + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_assets_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse()) + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_assets_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListAssetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_assets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_assets_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assets( + service.ListAssetsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_assets_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListAssetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_assets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_assets_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_assets( + service.ListAssetsRequest(), + parent='parent_value', + ) + + +def test_list_assets_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_assets(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Asset) + for i in results) +def test_list_assets_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Set the response to a series of pages. 
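+ # Editor's note: `call.side_effect` feeds one response per stub invocation; the pager keeps fetching while `next_page_token` is non-empty, so the four pages below yield 3 + 0 + 1 + 2 = 6 assets in total. The trailing RuntimeError is a sentinel: if the pager ever requested a fifth page, the mock would raise and fail the test.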
+ call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + pages = list(client.list_assets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_assets_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_assets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Asset) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_assets_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + resources.Asset(), + ], + next_page_token='abc', + ), + service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + ], + next_page_token='ghi', + ), + service.ListAssetsResponse( + assets=[ + resources.Asset(), + resources.Asset(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_assets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetAssetRequest, + dict, +]) +def test_get_asset(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + response = client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Asset) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +def test_get_asset_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetAssetRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_asset(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetAssetRequest( + name='name_value', + ) + +def test_get_asset_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_asset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc + request = {} + client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. 
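+ # Editor's note: `_wrapped_methods` is populated once by _prep_wrapped_messages() at client construction, mapping each bare transport stub to its gapic_v1.method.wrap_method() wrapper (which layers on the default retry/timeout/metadata). The assertions below show the cache being reused: the injected rpc mock is hit on every call, while wrap_method is never invoked again after construction.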
+ assert mock_rpc.call_count == 1 + + client.get_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_asset in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_asset] = mock_rpc + + request = {} + await client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_asset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_asset_async(transport: str = 'grpc_asyncio', request_type=service.GetAssetRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + )) + response = await client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetAssetRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Asset) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_asset_async_from_dict(): + await test_get_asset_async(request_type=dict) + +def test_get_asset_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value = resources.Asset() + client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_asset_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetAssetRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset()) + await client.get_asset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_asset_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Asset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_asset( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_asset_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_asset( + service.GetAssetRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_asset_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Asset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_asset( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_asset_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_asset( + service.GetAssetRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListAssetActionsRequest, + dict, +]) +def test_list_asset_actions(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListAssetActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_asset_actions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListAssetActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_asset_actions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListAssetActionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_asset_actions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_asset_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc + request = {} + client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_asset_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_asset_actions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_asset_actions] = mock_rpc + + request = {} + await client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_asset_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_asset_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetActionsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListAssetActionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetActionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_asset_actions_async_from_dict(): + await test_list_asset_actions_async(request_type=dict) + +def test_list_asset_actions_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListAssetActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_asset_actions_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListAssetActionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) + await client.list_asset_actions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_asset_actions_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_asset_actions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_asset_actions_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_asset_actions( + service.ListAssetActionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_asset_actions_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListActionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_asset_actions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_asset_actions_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_asset_actions( + service.ListAssetActionsRequest(), + parent='parent_value', + ) + + +def test_list_asset_actions_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_asset_actions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Action) + for i in results) +def test_list_asset_actions_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
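+ # Editor's note on the pager test above: list_asset_actions() hands its retry/timeout arguments and routing metadata to the returned pager, which replays them on each subsequent page fetch; the `_retry`, `_timeout` and `_metadata` assertions pin down that hand-off. The pages test below inspects the raw per-page tokens instead of the flattened results.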
+ with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = list(client.list_asset_actions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_asset_actions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_asset_actions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Action) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_asset_actions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
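+ # Editor's note: for the async client, `.pages` is an async iterator, hence the `async for` loop below; the `# pragma: no branch` exclusion works around a Python 3.9 coverage quirk with async generators (see the gapic-generator-python PR comment linked below).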
+ call.side_effect = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_asset_actions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.CreateTaskRequest, + dict, +]) +def test_create_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateTaskRequest( + parent='parent_value', + task_id='task_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateTaskRequest( + parent='parent_value', + task_id='task_id_value', + ) + +def test_create_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_task] = mock_rpc + request = {} + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_task] = mock_rpc + + request = {} + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=service.CreateTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
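+ # Editor's note: CreateTask is a long-running operation, so the stub is faked with a raw operations_pb2.Operation and the client is expected to wrap it in an api_core operation future; hence the test asserts isinstance(response, future.Future) rather than a Task proto.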
+ with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + +def test_create_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateTaskRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_task_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent='parent_value', + task=tasks.Task(name='name_value'), + task_id='task_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
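+ # (The flattened keyword arguments are folded by the client into a
+ # single request message, roughly
+ #     service.CreateTaskRequest(parent=..., task=..., task_id=...),
+ # which is why each field is checked on args[0] below.)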
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].task
+ mock_val = tasks.Task(name='name_value')
+ assert arg == mock_val
+ arg = args[0].task_id
+ mock_val = 'task_id_value'
+ assert arg == mock_val
+
+
+def test_create_task_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_task(
+ service.CreateTaskRequest(),
+ parent='parent_value',
+ task=tasks.Task(name='name_value'),
+ task_id='task_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_task(
+ parent='parent_value',
+ task=tasks.Task(name='name_value'),
+ task_id='task_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].task
+ mock_val = tasks.Task(name='name_value')
+ assert arg == mock_val
+ arg = args[0].task_id
+ mock_val = 'task_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_task(
+ service.CreateTaskRequest(),
+ parent='parent_value',
+ task=tasks.Task(name='name_value'),
+ task_id='task_id_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.UpdateTaskRequest,
+ dict,
+])
+def test_update_task(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.UpdateTaskRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
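+ # UpdateTask is a long-running operation: the stub returns an
+ # operations_pb2.Operation and the client wraps it in an api_core
+ # future, hence the future.Future check below.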
+ assert isinstance(response, future.Future) + + +def test_update_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateTaskRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateTaskRequest( + ) + +def test_update_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_task] = mock_rpc + request = {} + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_task] = mock_rpc + + request = {} + await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_task_async(transport: str = 'grpc_asyncio', request_type=service.UpdateTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_task_async_from_dict(): + await test_update_task_async(request_type=dict) + +def test_update_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTaskRequest() + + request.task.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'task.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_task_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateTaskRequest() + + request.task.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
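+ # The routing header is built from the request body, roughly
+ #     gapic_v1.routing_header.to_grpc_metadata((('task.name', request.task.name),))
+ # which yields the ('x-goog-request-params', 'task.name=name_value')
+ # entry asserted below.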
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'task.name=name_value',
+ ) in kw['metadata']
+
+
+def test_update_task_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_task(
+ task=tasks.Task(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].task
+ mock_val = tasks.Task(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_task_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_task(
+ service.UpdateTaskRequest(),
+ task=tasks.Task(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_task_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_task(
+ task=tasks.Task(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].task
+ mock_val = tasks.Task(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_task_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
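+ # (A rough sketch of the guard in the generated client, for context:
+ #     if request is not None and any([task, update_mask]):
+ #         raise ValueError(...)
+ # a request object and flattened fields are mutually exclusive.)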
+ with pytest.raises(ValueError): + await client.update_task( + service.UpdateTaskRequest(), + task=tasks.Task(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteTaskRequest, + dict, +]) +def test_delete_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.DeleteTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteTaskRequest( + name='name_value', + ) + +def test_delete_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc + request = {} + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_task] = mock_rpc + + request = {} + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=service.DeleteTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + +def test_delete_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_task_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.DeleteTaskRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_task),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
+ await client.delete_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_task_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_task(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_task_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_task(
+ service.DeleteTaskRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_task(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + service.DeleteTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListTasksRequest, + dict, +]) +def test_list_tasks(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTasksResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +def test_list_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListTasksRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_tasks(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == service.ListTasksRequest(
+ parent='parent_value',
+ page_token='page_token_value',
+ filter='filter_value',
+ order_by='order_by_value',
+ )
+
+def test_list_tasks_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_tasks in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc
+ request = {}
+ client.list_tasks(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_tasks(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_tasks in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc
+
+ request = {}
+ await client.list_tasks(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_tasks(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=service.ListTasksRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse(
+ next_page_token='next_page_token_value',
+ unreachable_locations=['unreachable_locations_value'],
+ ))
+ response = await client.list_tasks(request)
+
+ # Establish that the underlying gRPC stub method was called.
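+ # (Note: even though the stub yields a raw ListTasksResponse, the
+ # awaited client call is asserted below to return a ListTasksAsyncPager,
+ # which lazily fetches further pages, e.g. `async for task in response`.)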
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + +def test_list_tasks_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTasksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = service.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTasksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse()) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_tasks_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_tasks_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_tasks(
+ service.ListTasksRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_tasks(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_tasks(
+ service.ListTasksRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_tasks_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListTasksResponse(
+ tasks=[
+ tasks.Task(),
+ tasks.Task(),
+ tasks.Task(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListTasksResponse(
+ tasks=[],
+ next_page_token='def',
+ ),
+ service.ListTasksResponse(
+ tasks=[
+ tasks.Task(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListTasksResponse(
+ tasks=[
+ tasks.Task(),
+ tasks.Task(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_tasks(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, tasks.Task)
+ for i in results)
+def test_list_tasks_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Set the response to a series of pages.
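+ # Each stub invocation consumes one side_effect entry in order; the
+ # trailing RuntimeError acts as a tripwire proving the pager stops at
+ # the page whose next_page_token is empty rather than fetching again.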
+ call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tasks.Task) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetTaskRequest, + dict, +]) +def test_get_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
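+ # GetTask is a plain unary method (no LRO wrapper, no pager), so the
+ # mock hands back the tasks.Task resource directly.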
+ call.return_value = tasks.Task( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Task) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + + +def test_get_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTaskRequest( + name='name_value', + ) + +def test_get_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + request = {} + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
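+ # (For context, _prep_wrapped_messages builds this cache once at client
+ # construction, roughly:
+ #     self._wrapped_methods[self.get_task] = gapic_v1.method.wrap_method(
+ #         self.get_task, default_timeout=..., client_info=...)
+ # so replacing the cache entry above intercepts every call below.)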
+ assert mock_rpc.call_count == 1
+
+ client.get_task(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_task in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc
+
+ request = {}
+ await client.get_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_task(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=service.GetTaskRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task(
+ name='name_value',
+ uid='uid_value',
+ description='description_value',
+ display_name='display_name_value',
+ state=resources.State.ACTIVE,
+ ))
+ response = await client.get_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.GetTaskRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, tasks.Task)
+ assert response.name == 'name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.display_name == 'display_name_value'
+ assert response.state == resources.State.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_task_async_from_dict():
+ await test_get_task_async(request_type=dict)
+
+def test_get_task_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetTaskRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_task),
+ '__call__') as call:
+ call.return_value = tasks.Task()
+ client.get_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_task_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetTaskRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_task),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
+ await client.get_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_task_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = tasks.Task()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_task(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_task_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_task(
+ service.GetTaskRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_task),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_task(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_task( + service.GetTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.ListJobsRequest, + dict, +]) +def test_list_jobs(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListJobsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListJobsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.list_jobs(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_jobs in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc
+
+ request = {}
+ await client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_jobs(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=service.ListJobsRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.ListJobsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListJobsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+ await test_list_jobs_async(request_type=dict)
+
+def test_list_jobs_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListJobsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs),
+ '__call__') as call:
+ call.return_value = service.ListJobsResponse()
+ client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse()) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_jobs_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_jobs_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + service.ListJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_jobs_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_jobs_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
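+ # The flattened keywords are only sugar for building the request message, + # so supplying both a request object and flattened values would make it + # ambiguous which one wins; the client raises ValueError instead.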
+ with pytest.raises(ValueError): + await client.list_jobs( + service.ListJobsRequest(), + parent='parent_value', + ) + + +def test_list_jobs_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tasks.Job) + for i in results) + + +def test_list_jobs_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages.
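+ # mock consumes side_effect one element per call, in order; the trailing + # RuntimeError is a tripwire that fails the test should the pager request + # more pages than the four stubbed responses.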
+ call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tasks.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + tasks.Job(), + ], + next_page_token='abc', + ), + service.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + ], + next_page_token='ghi', + ), + service.ListJobsResponse( + jobs=[ + tasks.Job(), + tasks.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.RunTaskRequest, + dict, +]) +def test_run_task(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.RunTaskResponse( + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.RunTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.RunTaskResponse) + + +def test_run_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
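+ # Per AIP-4235 the generator auto-populates annotated UUID4 request fields + # (typically request_id) only when they are empty; explicitly set fields + # such as name must reach the stub unchanged, as asserted below.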
+ request = service.RunTaskRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.run_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RunTaskRequest( + name='name_value', + ) + +def test_run_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.run_task] = mock_rpc + request = {} + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.run_task in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.run_task] = mock_rpc + + request = {} + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=service.RunTaskRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
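+ # FakeUnaryUnaryCall wraps the canned response so that it can be awaited, + # mimicking the unary-unary call object a real grpc.aio stub returns.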
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( + )) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.RunTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.RunTaskResponse) + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + +def test_run_task_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RunTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = service.RunTaskResponse() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RunTaskRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_run_task_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = service.RunTaskResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_run_task_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + client.run_task( + service.RunTaskRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + service.RunTaskRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.GetJobRequest, + dict, +]) +def test_get_job(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tasks.Job( + name='name_value', + uid='uid_value', + state=tasks.Job.State.RUNNING, + retry_count=1214, + service=tasks.Job.Service.DATAPROC, + service_job='service_job_value', + message='message_value', + trigger=tasks.Job.Trigger.TASK_CONFIG, + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Job) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.state == tasks.Job.State.RUNNING + assert response.retry_count == 1214 + assert response.service == tasks.Job.Service.DATAPROC + assert response.service_job == 'service_job_value' + assert response.message == 'message_value' + assert response.trigger == tasks.Job.Trigger.TASK_CONFIG + + +def test_get_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235.
+ request = service.GetJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetJobRequest( + name='name_value', + ) + +def test_get_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc + + request = {} + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=service.GetJobRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job( + name='name_value', + uid='uid_value', + state=tasks.Job.State.RUNNING, + retry_count=1214, + service=tasks.Job.Service.DATAPROC, + service_job='service_job_value', + message='message_value', + trigger=tasks.Job.Trigger.TASK_CONFIG, + )) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Job) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.state == tasks.Job.State.RUNNING + assert response.retry_count == 1214 + assert response.service == tasks.Job.Service.DATAPROC + assert response.service_job == 'service_job_value' + assert response.message == 'message_value' + assert response.trigger == tasks.Job.Trigger.TASK_CONFIG + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + +def test_get_job_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = tasks.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tasks.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
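+ # The flattened keyword is copied onto a freshly built GetJobRequest, so + # the stub still receives a complete request message; the assertions below + # pull that message back out of the recorded mock call.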
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + service.GetJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job( + service.GetJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.CancelJobRequest, + dict, +]) +def test_cancel_job(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CancelJobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request.
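+ # Patching __call__ on the type of the stub's multicallable intercepts the + # RPC at the gRPC layer while leaving the client-side request construction + # and routing-header plumbing in place.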
+ with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.cancel_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CancelJobRequest( + name='name_value', + ) + +def test_cancel_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.cancel_job in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc + + request = {} + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=service.CancelJobRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. 
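+ # CancelJob returns google.protobuf.Empty, which the client surfaces as + # None, so beyond the invocation itself there is no payload to assert on.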
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) + +def test_cancel_job_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = None + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CancelJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_cancel_job_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_cancel_job_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + service.CancelJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_cancel_job_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_cancel_job_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_job( + service.CancelJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.CreateEnvironmentRequest, + dict, +]) +def test_create_environment(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_environment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateEnvironmentRequest( + parent='parent_value', + environment_id='environment_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.create_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest( + parent='parent_value', + environment_id='environment_id_value', + ) + +def test_create_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc + request = {} + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_environment] = mock_rpc + + request = {} + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_environment_async(transport: str = 'grpc_asyncio', request_type=service.CreateEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
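+ # create_environment is a long-running operation: the stub returns an + # operations_pb2.Operation, which the client wraps in a future, hence the + # isinstance(response, future.Future) assertion below.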
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_environment_async_from_dict(): + await test_create_environment_async(request_type=dict) + +def test_create_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_environment( + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].environment + mock_val = analyze.Environment(name='name_value') + assert arg == mock_val + arg = args[0].environment_id + mock_val = 'environment_id_value' + assert arg == mock_val + + +def test_create_environment_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_environment( + service.CreateEnvironmentRequest(), + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + +@pytest.mark.asyncio +async def test_create_environment_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_environment( + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].environment + mock_val = analyze.Environment(name='name_value') + assert arg == mock_val + arg = args[0].environment_id + mock_val = 'environment_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_environment_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_environment( + service.CreateEnvironmentRequest(), + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateEnvironmentRequest, + dict, +]) +def test_update_environment(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_environment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateEnvironmentRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateEnvironmentRequest( + ) + +def test_update_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc + request = {} + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
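+ # (The operations client is itself a generated client whose methods are + # wrapped once when it is first built, which is why wrapper_fn fires again + # on that first LRO call.)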
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_environment] = mock_rpc + + request = {} + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_environment_async(transport: str = 'grpc_asyncio', request_type=service.UpdateEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.UpdateEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_environment_async_from_dict(): + await test_update_environment_async(request_type=dict) + +def test_update_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateEnvironmentRequest() + + request.environment.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'environment.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateEnvironmentRequest() + + request.environment.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'environment.name=name_value', + ) in kw['metadata'] + + +def test_update_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_environment( + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].environment + mock_val = analyze.Environment(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_environment_flattened_error(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_environment( + service.UpdateEnvironmentRequest(), + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_environment_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_environment(
+ environment=analyze.Environment(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].environment
+ mock_val = analyze.Environment(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_environment_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_environment(
+ service.UpdateEnvironmentRequest(),
+ environment=analyze.Environment(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.DeleteEnvironmentRequest,
+ dict,
+])
+def test_delete_environment(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_environment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.delete_environment(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.DeleteEnvironmentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_environment_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = service.DeleteEnvironmentRequest(
+ name='name_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_environment),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
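+
+ # A minimal sketch of the AIP-4235 behavior this test guards; the field
+ # name below is hypothetical, and the generator only fills UUID4 fields
+ # that were left empty:
+ import uuid
+ _sketch_req = {'request_id': ''}
+ if not _sketch_req['request_id']:
+ _sketch_req['request_id'] = str(uuid.uuid4())
+ assert _sketch_req['request_id'] # a user-supplied value would be kept as-is
+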
+ client.delete_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest( + name='name_value', + ) + +def test_delete_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc + request = {} + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_environment] = mock_rpc + + request = {} + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_environment_async(transport: str = 'grpc_asyncio', request_type=service.DeleteEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
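+
+ # Why FakeUnaryUnaryCall is needed below: the async transport awaits the
+ # stub, so a plain return value would not be awaitable. A stdlib-only
+ # sketch of the same idea (hypothetical helper, not the api_core class):
+ async def _fake_unary_call(response):
+ return response
+ # e.g. call.return_value = _fake_unary_call(operations_pb2.Operation())
+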
+ with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.DeleteEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_environment_async_from_dict(): + await test_delete_environment_async(request_type=dict) + +def test_delete_environment_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_environment_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_environment( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
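+
+ # Flattened kwargs are merged into a request message before the transport
+ # runs; roughly (a sketch, not the exact generated code):
+ _sketch = service.DeleteEnvironmentRequest()
+ _sketch.name = 'name_value'
+ assert _sketch == service.DeleteEnvironmentRequest(name='name_value')
+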
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_environment_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_environment(
+ service.DeleteEnvironmentRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_environment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_environment(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_environment(
+ service.DeleteEnvironmentRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.ListEnvironmentsRequest,
+ dict,
+])
+def test_list_environments(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListEnvironmentsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_environments(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.ListEnvironmentsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEnvironmentsPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_environments_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListEnvironmentsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_environments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + order_by='order_by_value', + ) + +def test_list_environments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_environments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc + request = {} + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_environments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_environments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_environments in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_environments] = mock_rpc + + request = {} + await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.list_environments(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_environments_async(transport: str = 'grpc_asyncio', request_type=service.ListEnvironmentsRequest):
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_environments(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.ListEnvironmentsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_environments_async_from_dict():
+ await test_list_environments_async(request_type=dict)
+
+def test_list_environments_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListEnvironmentsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ call.return_value = service.ListEnvironmentsResponse()
+ client.list_environments(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_environments_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListEnvironmentsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
+ await client.list_environments(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
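+
+ # The metadata entry checked below comes from api_core's routing-header
+ # helper; a self-contained example of what that helper produces:
+ assert gapic_v1.routing_header.to_grpc_metadata(
+ (('parent', 'parent_value'),)
+ ) == ('x-goog-request-params', 'parent=parent_value')
+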
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_environments_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListEnvironmentsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_environments(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_environments_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_environments(
+ service.ListEnvironmentsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_environments(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_environments(
+ service.ListEnvironmentsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_environments_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[],
+ next_page_token='def',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_environments(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, analyze.Environment)
+ for i in results)
+
+
+def test_list_environments_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[],
+ next_page_token='def',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_environments(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_environments_async_pager():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_environments),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[],
+ next_page_token='def',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListEnvironmentsResponse(
+ environments=[
+ analyze.Environment(),
+ analyze.Environment(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_environments(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, analyze.Environment)
+ for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_environments_async_pages():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
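+
+ # The pagers consumed in these tests just follow next_page_token until it
+ # is empty; a stdlib-only sketch of that loop (hypothetical fetch callable):
+ def _sketch_pages(fetch, token=''):
+ while True:
+ page = fetch(token)
+ yield page
+ token = page['next_page_token']
+ if not token:
+ break
+ _fake_api = {'': {'items': [1, 2], 'next_page_token': 'abc'},
+ 'abc': {'items': [3], 'next_page_token': ''}}
+ assert [p['items'] for p in _sketch_pages(_fake_api.__getitem__)] == [[1, 2], [3]]
+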
+ with mock.patch.object( + type(client.transport.list_environments), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token='abc', + ), + service.ListEnvironmentsResponse( + environments=[], + next_page_token='def', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + ], + next_page_token='ghi', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_environments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + service.GetEnvironmentRequest, + dict, +]) +def test_get_environment(request_type, transport: str = 'grpc'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Environment( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + response = client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetEnvironmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Environment) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +def test_get_environment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetEnvironmentRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_environment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest( + name='name_value', + ) + +def test_get_environment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc + request = {} + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_environment in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_environment] = mock_rpc + + request = {} + await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_environment_async(transport: str = 'grpc_asyncio', request_type=service.GetEnvironmentRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. 
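+
+ # "Everything is optional in proto3", as relied on throughout these tests:
+ # an empty request is valid and unset scalars read back as their defaults.
+ assert service.GetEnvironmentRequest().name == ''
+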
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment(
+ name='name_value',
+ display_name='display_name_value',
+ uid='uid_value',
+ description='description_value',
+ state=resources.State.ACTIVE,
+ ))
+ response = await client.get_environment(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.GetEnvironmentRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analyze.Environment)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.uid == 'uid_value'
+ assert response.description == 'description_value'
+ assert response.state == resources.State.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test_get_environment_async_from_dict():
+ await test_get_environment_async(request_type=dict)
+
+def test_get_environment_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetEnvironmentRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_environment),
+ '__call__') as call:
+ call.return_value = analyze.Environment()
+ client.get_environment(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_environment_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.GetEnvironmentRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_environment),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
+ await client.get_environment(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_environment_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_environment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analyze.Environment()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_environment(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_environment_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_environment(
+ service.GetEnvironmentRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_environment_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_environment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_environment(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_environment_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_environment(
+ service.GetEnvironmentRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ service.ListSessionsRequest,
+ dict,
+])
+def test_list_sessions(request_type, transport: str = 'grpc'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListSessionsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = service.ListSessionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSessionsPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_sessions_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = service.ListSessionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSessionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_sessions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=service.ListSessionsRequest): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = service.ListSessionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSessionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_async_from_dict():
+ await test_list_sessions_async(request_type=dict)
+
+def test_list_sessions_field_headers():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListSessionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ call.return_value = service.ListSessionsResponse()
+ client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_field_headers_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = service.ListSessionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
+ await client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_sessions_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = service.ListSessionsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_sessions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_sessions_flattened_error():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_sessions(
+ service.ListSessionsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_sessions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_error_async():
+ client = DataplexServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_sessions(
+ service.ListSessionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_sessions_pager(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ service.ListSessionsResponse(
+ sessions=[
+ analyze.Session(),
+ analyze.Session(),
+ analyze.Session(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListSessionsResponse(
+ sessions=[],
+ next_page_token='def',
+ ),
+ service.ListSessionsResponse(
+ sessions=[
+ analyze.Session(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListSessionsResponse(
+ sessions=[
+ analyze.Session(),
+ analyze.Session(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, analyze.Session)
+ for i in results)
+
+
+def test_list_sessions_pages(transport_name: str = "grpc"):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analyze.Session) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_lake_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc + + request = {} + client.create_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_lake_rest_required_fields(request_type=service.CreateLakeRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["lake_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "lakeId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "lakeId" in jsonified_request + assert jsonified_request["lakeId"] == request_init["lake_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["lakeId"] = 'lake_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lake._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
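+
+ # The set difference asserted next encodes: every still-unset required
+ # field must be one that travels in the query string, never a path or body
+ # field. A self-contained example of the same check:
+ assert not {'lake_id'} - {'lake_id', 'validate_only'}
+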
+ assert not set(unset_fields) - set(("lake_id", "validate_only", ))
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "parent" in jsonified_request
+ assert jsonified_request["parent"] == 'parent_value'
+ assert "lakeId" in jsonified_request
+ assert jsonified_request["lakeId"] == 'lake_id_value'
+
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.create_lake(request)
+
+ expected_params = [
+ (
+ "lakeId",
+ "",
+ ),
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_create_lake_rest_unset_required_fields():
+ transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.create_lake._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("lakeId", "validateOnly", )) & set(("parent", "lakeId", "lake", )))
+
+
+def test_create_lake_rest_flattened():
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ lake=resources.Lake(name='name_value'),
+ lake_id='lake_id_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.create_lake(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
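+
+ # path_template.validate (used below) matches a URI against an http-rule
+ # pattern; a self-contained example with the same api_core helper:
+ assert path_template.validate(
+ 'v1/{parent=projects/*/locations/*}/lakes',
+ 'v1/projects/sample1/locations/sample2/lakes')
+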
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/lakes" % client.transport._host, args[1]) + + +def test_create_lake_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_lake( + service.CreateLakeRequest(), + parent='parent_value', + lake=resources.Lake(name='name_value'), + lake_id='lake_id_value', + ) + + +def test_update_lake_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc + + request = {} + client.update_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_lake_rest_required_fields(request_type=service.UpdateLakeRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_lake._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
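+ # NOTE (illustrative): the real transcode() would reject this request --
+ # the UpdateLake http rule binds a {lake.name=...} path parameter, and
+ # the request here is deliberately left at defaults. The canned result
+ # below stands in for it; PATCH carries a body, so 'body' is supplied
+ # alongside 'uri', 'method', and 'query_params'.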
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_lake(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_lake_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_lake._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "lake", ))) + + +def test_update_lake_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + lake=resources.Lake(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_lake(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{lake.name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) + + +def test_update_lake_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
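+ # NOTE: the request object and the flattened keyword arguments are
+ # mutually exclusive in GAPIC clients; passing both raises ValueError
+ # before any transport call is made, so no HTTP mock is needed here.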
+ with pytest.raises(ValueError): + client.update_lake( + service.UpdateLakeRequest(), + lake=resources.Lake(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_lake_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc + + request = {} + client.delete_lake(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_lake_rest_required_fields(request_type=service.DeleteLakeRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
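+ # NOTE (illustrative): unlike the create/update variants above, the
+ # canned transcode result below has no 'body' key -- DELETE methods carry
+ # no request body, so every field must surface in query_params.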
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_lake(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_lake_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_lake._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_lake_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_lake(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) + + +def test_delete_lake_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_lake( + service.DeleteLakeRequest(), + name='name_value', + ) + + +def test_list_lakes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_lakes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
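+ # NOTE (illustrative): _wrapped_methods is a plain dict keyed by the
+ # bound transport method, which is why the mock can be swapped in
+ # directly on the next line. ListLakes is not a long-running operation,
+ # so no extra operations wrapper is built on the first call and
+ # wrapper_fn stays at zero across both invocations.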
+ client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc + + request = {} + client.list_lakes(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_lakes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_lakes_rest_required_fields(request_type=service.ListLakesRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lakes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lakes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListLakesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListLakesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_lakes(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_lakes_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_lakes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_lakes_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListLakesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListLakesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_lakes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/lakes" % client.transport._host, args[1]) + + +def test_list_lakes_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_lakes( + service.ListLakesRequest(), + parent='parent_value', + ) + + +def test_list_lakes_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
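+ # NOTE (illustrative): the pager test below feeds four canned pages
+ # through req.side_effect, twice over (once for item iteration, once for
+ # page iteration). The pager follows next_page_token until it is empty,
+ # so the expected token walk is 'abc' -> 'def' -> 'ghi' -> '', yielding
+ # 3 + 0 + 1 + 2 == 6 Lake results in total.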
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + resources.Lake(), + ], + next_page_token='abc', + ), + service.ListLakesResponse( + lakes=[], + next_page_token='def', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + ], + next_page_token='ghi', + ), + service.ListLakesResponse( + lakes=[ + resources.Lake(), + resources.Lake(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListLakesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_lakes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Lake) + for i in results) + + pages = list(client.list_lakes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_lake_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_lake in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc + + request = {} + client.get_lake(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_lake(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_lake_rest_required_fields(request_type=service.GetLakeRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lake._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Lake() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Lake.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_lake(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_lake_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_lake._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_lake_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
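+ # NOTE (illustrative sketch): GetLake is a unary call, so the mocked
+ # HTTP body must round-trip through the resource's protobuf type, e.g.:
+ #   json_format.MessageToJson(resources.Lake.pb(resources.Lake()))
+ # whereas the LRO tests above serialize an operations_pb2.Operation.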
+ return_value = resources.Lake() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Lake.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_lake(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) + + +def test_get_lake_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_lake( + service.GetLakeRequest(), + name='name_value', + ) + + +def test_list_lake_actions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_lake_actions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc + + request = {} + client.list_lake_actions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_lake_actions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_lake_actions_rest_required_fields(request_type=service.ListLakeActionsRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lake_actions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lake_actions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListActionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListActionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_lake_actions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_lake_actions_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_lake_actions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_lake_actions_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListActionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListActionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_lake_actions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/actions" % client.transport._host, args[1]) + + +def test_list_lake_actions_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_lake_actions( + service.ListLakeActionsRequest(), + parent='parent_value', + ) + + +def test_list_lake_actions_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + resources.Action(), + ], + next_page_token='abc', + ), + service.ListActionsResponse( + actions=[], + next_page_token='def', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + ], + next_page_token='ghi', + ), + service.ListActionsResponse( + actions=[ + resources.Action(), + resources.Action(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListActionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + pager = client.list_lake_actions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Action) + for i in results) + + pages = list(client.list_lake_actions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc + + request = {} + client.create_zone(request) + + # Establish that the underlying gRPC stub method was called. 
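+ # NOTE: CreateZone returns a long-running operation, so the first call
+ # also builds and caches an operations-polling wrapper; the wrapper_fn
+ # assertions after the second call confirm that wrapper is reused rather
+ # than rebuilt.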
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_zone_rest_required_fields(request_type=service.CreateZoneRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["zone_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "zoneId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "zoneId" in jsonified_request + assert jsonified_request["zoneId"] == request_init["zone_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["zoneId"] = 'zone_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_zone._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", "zone_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "zoneId" in jsonified_request + assert jsonified_request["zoneId"] == 'zone_id_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
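+ # NOTE (illustrative): zone_id is a required query parameter whose proto
+ # default ("") is dropped by MessageToJson and re-added by the unset-
+ # required-fields machinery, so the outgoing request is expected to carry
+ # it explicitly, roughly:
+ #   [("zoneId", ""), ("$alt", "json;enum-encoding=int")]
+ # which is what the expected_params assertion below verifies.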
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_zone(request) + + expected_params = [ + ( + "zoneId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_zone_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", "zoneId", )) & set(("parent", "zoneId", "zone", ))) + + +def test_create_zone_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/zones" % client.transport._host, args[1]) + + +def test_create_zone_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_zone( + service.CreateZoneRequest(), + parent='parent_value', + zone=resources.Zone(name='name_value'), + zone_id='zone_id_value', + ) + + +def test_update_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc + + request = {} + client.update_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_zone_rest_required_fields(request_type=service.UpdateZoneRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_zone._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_zone(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_zone_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "zone", ))) + + +def test_update_zone_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + zone=resources.Zone(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{zone.name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1]) + + +def test_update_zone_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_zone( + service.UpdateZoneRequest(), + zone=resources.Zone(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_zone_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_zone in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc + + request = {} + client.delete_zone(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_zone(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_zone_rest_required_fields(request_type=service.DeleteZoneRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_zone._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_zone(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_zone_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_zone._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_zone_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_zone(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1]) + + +def test_delete_zone_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_zone( + service.DeleteZoneRequest(), + name='name_value', + ) + + +def test_list_zones_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_zones in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc + + request = {} + client.list_zones(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_zones(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_zones_rest_required_fields(request_type=service.ListZonesRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zones._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zones._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListZonesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListZonesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_zones(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_zones_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_zones._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_zones_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListZonesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListZonesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_zones(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/zones" % client.transport._host, args[1]) + + +def test_list_zones_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_zones( + service.ListZonesRequest(), + parent='parent_value', + ) + + +def test_list_zones_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+        # Set the response as a series of pages
+        response = (
+            service.ListZonesResponse(
+                zones=[
+                    resources.Zone(),
+                    resources.Zone(),
+                    resources.Zone(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListZonesResponse(
+                zones=[],
+                next_page_token='def',
+            ),
+            service.ListZonesResponse(
+                zones=[
+                    resources.Zone(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListZonesResponse(
+                zones=[
+                    resources.Zone(),
+                    resources.Zone(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListZonesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+
+        pager = client.list_zones(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Zone)
+                for i in results)
+
+        pages = list(client.list_zones(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_zone_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_zone in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc
+
+        request = {}
+        client.get_zone(request)
+
+        # Establish that the underlying gRPC stub method was called.
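+        # One invocation so far; the second call below must reach the same
+        # mock without any re-wrapping.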
+        assert mock_rpc.call_count == 1
+
+        client.get_zone(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_zone_rest_required_fields(request_type=service.GetZoneRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_zone._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_zone._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = resources.Zone()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = resources.Zone.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_zone(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_zone_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_zone._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_zone_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
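+        # An empty Zone is sufficient: this test verifies request routing,
+        # not the payload contents.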
+        return_value = resources.Zone()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = resources.Zone.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_zone(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1])
+
+
+def test_get_zone_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_zone(
+            service.GetZoneRequest(),
+            name='name_value',
+        )
+
+
+def test_list_zone_actions_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_zone_actions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc
+
+        request = {}
+        client.list_zone_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_zone_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_zone_actions_rest_required_fields(request_type=service.ListZoneActionsRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zone_actions._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zone_actions._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.ListActionsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
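+            # As above, the transcode mock routes the request to a stub URI so
+            # the expected_params assertion can check the query string in
+            # isolation.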
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = service.ListActionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_zone_actions(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_zone_actions_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_zone_actions._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_zone_actions_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListActionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = service.ListActionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_zone_actions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/actions" % client.transport._host, args[1])
+
+
+def test_list_zone_actions_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_zone_actions(
+            service.ListZoneActionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_zone_actions_rest_pager(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                    resources.Action(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListActionsResponse(
+                actions=[],
+                next_page_token='def',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListActionsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        pager = client.list_zone_actions(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Action)
+                for i in results)
+
+        pages = list(client.list_zone_actions(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_create_asset_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_asset in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc
+
+        request = {}
+        client.create_asset(request)
+
+        # Establish that the underlying gRPC stub method was called.
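+        # create_asset is a long-running operation, which is why the mock's
+        # return value was given an operation-style name above.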
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_asset(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_asset_rest_required_fields(request_type=service.CreateAssetRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["asset_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "assetId" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "assetId" in jsonified_request
+    assert jsonified_request["assetId"] == request_init["asset_id"]
+
+    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["assetId"] = 'asset_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_asset._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("asset_id", "validate_only", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "assetId" in jsonified_request
+    assert jsonified_request["assetId"] == 'asset_id_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
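+            # Unlike the GET cases above, this POST carries a request body,
+            # so transcode_result also receives a 'body' entry below.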
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_asset(request)
+
+            expected_params = [
+                (
+                    "assetId",
+                    "",
+                ),
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_asset_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_asset._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("assetId", "validateOnly", )) & set(("parent", "assetId", "asset", )))
+
+
+def test_create_asset_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.create_asset(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets" % client.transport._host, args[1])
+
+
+def test_create_asset_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_asset(
+            service.CreateAssetRequest(),
+            parent='parent_value',
+            asset=resources.Asset(name='name_value'),
+            asset_id='asset_id_value',
+        )
+
+
+def test_update_asset_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_asset in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc
+
+        request = {}
+        client.update_asset(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_asset(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_asset_rest_required_fields(request_type=service.UpdateAssetRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_asset._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("update_mask", "validate_only", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
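+            # update_asset maps to PATCH with a request body; only the $alt
+            # system parameter is expected in the query string.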
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_asset(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_asset_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_asset._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "asset", )))
+
+
+def test_update_asset_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            asset=resources.Asset(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.update_asset(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{asset.name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1])
+
+
+def test_update_asset_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_asset(
+            service.UpdateAssetRequest(),
+            asset=resources.Asset(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+def test_delete_asset_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_asset in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc
+
+        request = {}
+        client.delete_asset(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.delete_asset(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_asset_rest_required_fields(request_type=service.DeleteAssetRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
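+            # DELETE requests carry no payload, so transcode_result below has
+            # no 'body' entry.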
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.delete_asset(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_asset_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_asset._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_delete_asset_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.delete_asset(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1])
+
+
+def test_delete_asset_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_asset(
+            service.DeleteAssetRequest(),
+            name='name_value',
+        )
+
+
+def test_list_assets_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_assets in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc
+
+        request = {}
+        client.list_assets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_assets(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_assets_rest_required_fields(request_type=service.ListAssetsRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.ListAssetsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
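+            # filter/order_by/page_size/page_token are optional query params
+            # here; actual page traversal is covered by the pager test below.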
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = service.ListAssetsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_assets(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_assets_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_assets._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_assets_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListAssetsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = service.ListAssetsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_assets(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets" % client.transport._host, args[1])
+
+
+def test_list_assets_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_assets(
+            service.ListAssetsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_assets_rest_pager(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
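+        # Page tokens 'abc', 'def' and 'ghi' chain the first three pages; the
+        # final page omits the token, which ends iteration.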
+        # Set the response as a series of pages
+        response = (
+            service.ListAssetsResponse(
+                assets=[
+                    resources.Asset(),
+                    resources.Asset(),
+                    resources.Asset(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListAssetsResponse(
+                assets=[],
+                next_page_token='def',
+            ),
+            service.ListAssetsResponse(
+                assets=[
+                    resources.Asset(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListAssetsResponse(
+                assets=[
+                    resources.Asset(),
+                    resources.Asset(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListAssetsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+
+        pager = client.list_assets(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Asset)
+                for i in results)
+
+        pages = list(client.list_assets(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_asset_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_asset in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc
+
+        request = {}
+        client.get_asset(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_asset(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_asset_rest_required_fields(request_type=service.GetAssetRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_asset._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = resources.Asset()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = resources.Asset.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_asset(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_asset_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_asset._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_asset_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
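+        # As with get_zone above, an empty Asset is enough to verify routing.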
+        return_value = resources.Asset()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = resources.Asset.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_asset(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1])
+
+
+def test_get_asset_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_asset(
+            service.GetAssetRequest(),
+            name='name_value',
+        )
+
+
+def test_list_asset_actions_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_asset_actions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc
+
+        request = {}
+        client.list_asset_actions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_asset_actions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_asset_actions_rest_required_fields(request_type=service.ListAssetActionsRequest):
+    transport_class = transports.DataplexServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_asset_actions._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_asset_actions._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = service.ListActionsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = service.ListActionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_asset_actions(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_asset_actions_rest_unset_required_fields():
+    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_asset_actions._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_asset_actions_rest_flattened():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListActionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = service.ListActionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_asset_actions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/assets/*}/actions" % client.transport._host, args[1])
+
+
+def test_list_asset_actions_rest_flattened_error(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_asset_actions(
+            service.ListAssetActionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_asset_actions_rest_pager(transport: str = 'rest'):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                    resources.Action(),
+                ],
+                next_page_token='abc',
+            ),
+            service.ListActionsResponse(
+                actions=[],
+                next_page_token='def',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                ],
+                next_page_token='ghi',
+            ),
+            service.ListActionsResponse(
+                actions=[
+                    resources.Action(),
+                    resources.Action(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(service.ListActionsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
+
+        pager = client.list_asset_actions(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, resources.Action)
+                for i in results)
+
+        pages = list(client.list_asset_actions(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_create_task_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_task in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_task] = mock_rpc
+
+        request = {}
+        client.create_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_task_rest_required_fields(request_type=service.CreateTaskRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["task_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "taskId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "taskId" in jsonified_request + assert jsonified_request["taskId"] == request_init["task_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["taskId"] = 'task_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_task._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("task_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "taskId" in jsonified_request + assert jsonified_request["taskId"] == 'task_id_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
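+ # The stubbed transcode_result below mimics the dict the real
+ # transcode() builds from the method's http_options ('uri', 'method',
+ # 'query_params', plus 'body' for methods that carry one);
+ # 'v1/sample_method' is a placeholder URI, not a real Dataplex route.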
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_task(request) + + expected_params = [ + ( + "taskId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_task_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(("taskId", "validateOnly", )) & set(("parent", "taskId", "task", ))) + + +def test_create_task_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + task=tasks.Task(name='name_value'), + task_id='task_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/tasks" % client.transport._host, args[1]) + + +def test_create_task_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
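+ # In other words, exactly one calling convention is allowed per call:
+ #   client.create_task(request)                             # request object: ok
+ #   client.create_task(parent=..., task=..., task_id=...)   # flattened: ok
+ #   client.create_task(request, parent=...)                 # both: ValueError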
+ with pytest.raises(ValueError): + client.create_task( + service.CreateTaskRequest(), + parent='parent_value', + task=tasks.Task(name='name_value'), + task_id='task_id_value', + ) + + +def test_update_task_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_task] = mock_rpc + + request = {} + client.update_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_task_rest_required_fields(request_type=service.UpdateTaskRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_task._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_task(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_task_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "task", ))) + + +def test_update_task_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + task=tasks.Task(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{task.name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1]) + + +def test_update_task_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_task( + service.UpdateTaskRequest(), + task=tasks.Task(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_task_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc + + request = {} + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_task_rest_required_fields(request_type=service.DeleteTaskRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
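+ # DELETE carries no request body, so the stubbed transcode_result
+ # below sets no 'body' key; with an empty body and a field-less URI,
+ # serialization falls through entirely to query_params.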
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_task(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_task_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_task_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1]) + + +def test_delete_task_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + service.DeleteTaskRequest(), + name='name_value', + ) + + +def test_list_tasks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
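+ # _wrapped_methods maps each transport method to the retry/timeout
+ # wrapper built once at client construction; roughly (an illustrative
+ # shape, not the actual structure):
+ #   {transport.list_tasks: wrap_method(transport.list_tasks, default_timeout=...), ...}
+ # so replacing the cached entry intercepts every dispatch below.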
+ client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc + + request = {} + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_tasks_rest_required_fields(request_type=service.ListTasksRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListTasksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_tasks(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_tasks_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_tasks._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_tasks_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/tasks" % client.transport._host, args[1]) + + +def test_list_tasks_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + service.ListTasksRequest(), + parent='parent_value', + ) + + +def test_list_tasks_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + tasks.Task(), + ], + next_page_token='abc', + ), + service.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + ], + next_page_token='ghi', + ), + service.ListTasksResponse( + tasks=[ + tasks.Task(), + tasks.Task(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListTasksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + pager = client.list_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tasks.Task) + for i in results) + + pages = list(client.list_tasks(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_task_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + + request = {} + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_task_rest_required_fields(request_type=service.GetTaskRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tasks.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tasks.Task.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_task(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_task_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_task_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
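+ # An empty tasks.Task() suffices: this flattened-call test only
+ # inspects the outgoing URI, never the decoded response fields.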
+ return_value = tasks.Task() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tasks.Task.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1]) + + +def test_get_task_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + service.GetTaskRequest(), + name='name_value', + ) + + +def test_list_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_jobs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + + request = {} + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_jobs_rest_required_fields(request_type=service.ListJobsRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
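+ # An empty set difference below means every still-unset required
+ # field is a legitimate query parameter:
+ #   set(unset_fields) - set(allowed_query_params) == set()
+ # i.e. nothing required has leaked in from the URI path or the body.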
+ assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_jobs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_jobs_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_jobs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_jobs_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
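+ # path_template.validate matches the sent URI against the method's
+ # HTTP rule; with the sample parent above, the expected expansion is
+ #   .../v1/projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs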
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/tasks/*}/jobs" % client.transport._host, args[1])
+
+
+def test_list_jobs_rest_flattened_error(transport: str = 'rest'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_jobs(
+ service.ListJobsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_jobs_rest_pager(transport: str = 'rest'):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ service.ListJobsResponse(
+ jobs=[
+ tasks.Job(),
+ tasks.Job(),
+ tasks.Job(),
+ ],
+ next_page_token='abc',
+ ),
+ service.ListJobsResponse(
+ jobs=[],
+ next_page_token='def',
+ ),
+ service.ListJobsResponse(
+ jobs=[
+ tasks.Job(),
+ ],
+ next_page_token='ghi',
+ ),
+ service.ListJobsResponse(
+ jobs=[
+ tasks.Job(),
+ tasks.Job(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(service.ListJobsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
+
+ pager = client.list_jobs(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, tasks.Job)
+ for i in results)
+
+ pages = list(client.list_jobs(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_run_task_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.run_task in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client._transport._wrapped_methods[client._transport.run_task] = mock_rpc
+
+ request = {}
+ client.run_task(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.run_task(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_run_task_rest_required_fields(request_type=service.RunTaskRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.RunTaskResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.RunTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.run_task(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_run_task_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.run_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_run_task_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.RunTaskResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.RunTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.run_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}:run" % client.transport._host, args[1]) + + +def test_run_task_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + service.RunTaskRequest(), + name='name_value', + ) + + +def test_get_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + + request = {} + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_job_rest_required_fields(request_type=service.GetJobRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tasks.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tasks.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_job_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_job_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = tasks.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tasks.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}" % client.transport._host, args[1]) + + +def test_get_job_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + service.GetJobRequest(), + name='name_value', + ) + + +def test_cancel_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_job_rest_required_fields(request_type=service.CancelJobRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_job(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_job_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_cancel_job_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
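+ # CancelJob returns google.protobuf.Empty, surfaced to callers as
+ # None, so the faked HTTP body below is the empty string.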
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.cancel_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}:cancel" % client.transport._host, args[1]) + + +def test_cancel_job_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + service.CancelJobRequest(), + name='name_value', + ) + + +def test_create_environment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc + + request = {} + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_environment_rest_required_fields(request_type=service.CreateEnvironmentRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["environment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "environmentId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "environmentId" in jsonified_request + assert jsonified_request["environmentId"] == request_init["environment_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["environmentId"] = 'environment_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_environment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("environment_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "environmentId" in jsonified_request + assert jsonified_request["environmentId"] == 'environment_id_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
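+ # environment_id is a required query parameter in the REST mapping, which is why expected_params further below includes ("environmentId", "") even though the test sends it empty.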
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_environment(request) + + expected_params = [ + ( + "environmentId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_environment_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_environment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("environmentId", "validateOnly", )) & set(("parent", "environmentId", "environment", ))) + + +def test_create_environment_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/environments" % client.transport._host, args[1]) + + +def test_create_environment_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_environment( + service.CreateEnvironmentRequest(), + parent='parent_value', + environment=analyze.Environment(name='name_value'), + environment_id='environment_id_value', + ) + + +def test_update_environment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc + + request = {} + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_environment_rest_required_fields(request_type=service.UpdateEnvironmentRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_environment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
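+ # UpdateEnvironment transcodes to HTTP PATCH with the environment in the request body; update_mask and validate_only travel as query parameters, matching the unset-fields assertions above.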
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_environment(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_environment_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_environment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "environment", ))) + + +def test_update_environment_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{environment.name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1]) + + +def test_update_environment_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_environment( + service.UpdateEnvironmentRequest(), + environment=analyze.Environment(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_environment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc + + request = {} + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_environment_rest_required_fields(request_type=service.DeleteEnvironmentRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
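+ # DeleteEnvironment transcodes to HTTP DELETE, so no 'body' key is attached to the transcode result below.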
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_environment(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_environment_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_environment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_environment_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1]) + + +def test_delete_environment_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_environment( + service.DeleteEnvironmentRequest(), + name='name_value', + ) + + +def test_list_environments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_environments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc + + request = {} + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_environments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_environments_rest_required_fields(request_type=service.ListEnvironmentsRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_environments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_environments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListEnvironmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
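+ # ListEnvironments transcodes to HTTP GET; filter, order_by, page_size and page_token are optional query parameters, leaving parent as the only required field.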
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_environments(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_environments_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_environments._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_environments_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListEnvironmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_environments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/environments" % client.transport._host, args[1]) + + +def test_list_environments_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_environments( + service.ListEnvironmentsRequest(), + parent='parent_value', + ) + + +def test_list_environments_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
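+ # The pager test below feeds a fixed sequence of pages through req.side_effect; the sequence is doubled because the pager is consumed twice (once for items, once for pages).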
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token='abc', + ), + service.ListEnvironmentsResponse( + environments=[], + next_page_token='def', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + ], + next_page_token='ghi', + ), + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListEnvironmentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + + pager = client.list_environments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analyze.Environment) + for i in results) + + pages = list(client.list_environments(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_environment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_environment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc + + request = {} + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_environment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_environment_rest_required_fields(request_type=service.GetEnvironmentRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analyze.Environment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_environment(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_environment_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_environment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_environment_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
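+ # GetEnvironment returns the Environment resource itself; the proto below is serialized with MessageToJson to fake the REST payload.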
+ return_value = analyze.Environment() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analyze.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1]) + + +def test_get_environment_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_environment( + service.GetEnvironmentRequest(), + name='name_value', + ) + + +def test_list_sessions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sessions_rest_required_fields(request_type=service.ListSessionsRequest): + transport_class = transports.DataplexServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
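+ # ListSessions follows the same GET transcoding; its parent is an environment resource, so the sessions collection lives under .../environments/*/sessions.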
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sessions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_sessions_rest_unset_required_fields(): + transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_sessions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_sessions_rest_flattened(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSessionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_sessions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/environments/*}/sessions" % client.transport._host, args[1]) + + +def test_list_sessions_rest_flattened_error(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sessions( + service.ListSessionsRequest(), + parent='parent_value', + ) + + +def test_list_sessions_rest_pager(transport: str = 'rest'): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + analyze.Session(), + ], + next_page_token='abc', + ), + service.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + ], + next_page_token='ghi', + ), + service.ListSessionsResponse( + sessions=[ + analyze.Session(), + analyze.Session(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListSessionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + + pager = client.list_sessions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analyze.Session) + for i in results) + + pages = list(client.list_sessions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataplexServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataplexServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
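+ # Both the sync and asyncio gRPC transports expose their underlying channel via the grpc_channel property.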
+ transport = transports.DataplexServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataplexServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, + transports.DataplexServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DataplexServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_lakes_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + call.return_value = service.ListLakesResponse() + client.list_lakes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lake_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + call.return_value = resources.Lake() + client.get_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lake_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_lake_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakeActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zones_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + call.return_value = service.ListZonesResponse() + client.list_zones(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZonesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_zone_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + call.return_value = resources.Zone() + client.get_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zone_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_zone_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZoneActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_asset(request=None) + + # Establish that the underlying stub method was called. 
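+ # args[0] is compared against a default-constructed request to prove that request=None is coerced into an empty request message.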
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = service.ListAssetsResponse() + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_asset_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + call.return_value = resources.Asset() + client.get_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_asset_actions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + call.return_value = service.ListActionsResponse() + client.list_asset_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tasks_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = service.ListTasksResponse() + client.list_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListTasksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = tasks.Task() + client.get_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_jobs_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = service.ListJobsResponse() + client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_task_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = service.RunTaskResponse() + client.run_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.RunTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = tasks.Job() + client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + call.return_value = None + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_environments_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + call.return_value = service.ListEnvironmentsResponse() + client.list_environments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListEnvironmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_environment_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + call.return_value = analyze.Environment() + client.get_environment(request=None) + + # Establish that the underlying stub method was called. 
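+    # Each entry in mock_calls is a (name, args, kwargs) triple, so args[0]
+    # is the request message the client built from `request=None`.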
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sessions_empty_call_grpc(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value = service.ListSessionsResponse() + client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListSessionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataplexServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + # Designate an appropriate return value for the call. 
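+        # The async transport awaits the call object, so the stubbed response
+        # is wrapped in grpc_helpers_async.FakeUnaryUnaryCall to make it
+        # awaitable.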
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lakes_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_lakes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_lake_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + service_account='service_account_value', + )) + await client.get_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lake_actions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_lake_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakeActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_zones_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse( + next_page_token='next_page_token_value', + )) + await client.list_zones(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZonesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_zone_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + type_=resources.Zone.Type.RAW, + )) + await client.get_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_zone_actions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_zone_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZoneActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_asset(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_assets_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_asset_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + )) + await client.get_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_asset_actions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_asset_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_tasks_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + )) + await client.list_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListTasksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + )) + await client.get_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_jobs_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse( + next_page_token='next_page_token_value', + )) + await client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_task_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( + )) + await client.run_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.RunTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_job_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
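+    # Note that these async variants stub responses populated with sample
+    # field values, whereas the sync empty-call tests above used bare
+    # default messages.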
+ with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job( + name='name_value', + uid='uid_value', + state=tasks.Job.State.RUNNING, + retry_count=1214, + service=tasks.Job.Service.DATAPROC, + service_job='service_job_value', + message='message_value', + trigger=tasks.Job.Trigger.TASK_CONFIG, + )) + await client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_job_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_environments_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse( + next_page_token='next_page_token_value', + )) + await client.list_environments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListEnvironmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_environment_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + )) + await client.get_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sessions_empty_call_grpc_asyncio(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_sessions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListSessionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataplexServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_lake_rest_bad_request(request_type=service.CreateLakeRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_lake(request) + + +@pytest.mark.parametrize("request_type", [ + service.CreateLakeRequest, + dict, +]) +def test_create_lake_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["lake"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'service_account': 'service_account_value', 'metastore': {'service': 'service_value'}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}, 'metastore_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'endpoint': 'endpoint_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateLakeRequest.meta.fields["lake"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
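+        # proto-plus message classes expose their fields via `meta.fields`,
+        # while raw protobuf classes expose them via `DESCRIPTOR.fields`;
+        # the check below distinguishes the two.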
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["lake"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["lake"][field])): + del request_init["lake"][field][i][subfield] + else: + del request_init["lake"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_lake(request) + + # Establish that the response is the type that we expect. 
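+    # For long-running methods the generated check stops at a JSON round-trip
+    # of the Operation; no typed assertion on `response` is made here.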
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_lake_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_lake") as post, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_lake_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_lake") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.CreateLakeRequest.pb(service.CreateLakeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.CreateLakeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_lake_rest_bad_request(request_type=service.UpdateLakeRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
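+    # Patching Session.request makes the transport see a 400 response, which
+    # the client is expected to surface as core_exceptions.BadRequest.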
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_lake(request) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateLakeRequest, + dict, +]) +def test_update_lake_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}} + request_init["lake"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'service_account': 'service_account_value', 'metastore': {'service': 'service_value'}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}, 'metastore_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'endpoint': 'endpoint_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateLakeRequest.meta.fields["lake"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["lake"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["lake"][field])): + del request_init["lake"][field][i][subfield] + else: + del request_init["lake"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_lake(request) + + # Establish that the response is the type that we expect. 
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_lake_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_lake") as post, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_lake_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_lake") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.UpdateLakeRequest.pb(service.UpdateLakeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.UpdateLakeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_lake_rest_bad_request(request_type=service.DeleteLakeRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_lake(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.DeleteLakeRequest,
+    dict,
+])
+def test_delete_lake_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_lake(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_lake_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_lake") as post, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_lake_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_lake") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.DeleteLakeRequest.pb(service.DeleteLakeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.DeleteLakeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_lakes_rest_bad_request(request_type=service.ListLakesRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_lakes(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListLakesRequest,
+    dict,
+])
+def test_list_lakes_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListLakesResponse(
+            next_page_token='next_page_token_value',
+            unreachable_locations=['unreachable_locations_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = service.ListLakesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_lakes(request)
+
+    # Establish that the response is the type that we expect.
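+    # (The pager below wraps service.ListLakesResponse and proxies attribute
+    # access to it, so the field checks read through to the raw response.)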
+    assert isinstance(response, pagers.ListLakesPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_locations == ['unreachable_locations_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_lakes_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lakes") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lakes_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_lakes") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.ListLakesRequest.pb(service.ListLakesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = service.ListLakesResponse.to_json(service.ListLakesResponse())
+        req.return_value.content = return_value
+
+        request = service.ListLakesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListLakesResponse()
+        post_with_metadata.return_value = service.ListLakesResponse(), metadata
+
+        client.list_lakes(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_lake_rest_bad_request(request_type=service.GetLakeRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_lake(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.GetLakeRequest,
+    dict,
+])
+def test_get_lake_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = resources.Lake(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            service_account='service_account_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = resources.Lake.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_lake(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Lake)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.service_account == 'service_account_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_lake_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_lake") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_lake_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_lake") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.GetLakeRequest.pb(service.GetLakeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = resources.Lake.to_json(resources.Lake())
+        req.return_value.content = return_value
+
+        request = service.GetLakeRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = resources.Lake()
+        post_with_metadata.return_value = resources.Lake(), metadata
+
+        client.get_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_lake_actions_rest_bad_request(request_type=service.ListLakeActionsRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_lake_actions(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListLakeActionsRequest,
+    dict,
+])
+def test_list_lake_actions_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = service.ListActionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_lake_actions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLakeActionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_lake_actions_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lake_actions") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lake_actions_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_lake_actions") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.ListLakeActionsRequest.pb(service.ListLakeActionsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = service.ListActionsResponse.to_json(service.ListActionsResponse())
+        req.return_value.content = return_value
+
+        request = service.ListLakeActionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListActionsResponse()
+        post_with_metadata.return_value = service.ListActionsResponse(), metadata
+
+        client.list_lake_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_zone_rest_bad_request(request_type=service.CreateZoneRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_zone(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.CreateZoneRequest,
+    dict,
+])
+def test_create_zone_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request_init["zone"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'type_': 1, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'resource_spec': {'location_type': 1}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = service.CreateZoneRequest.meta.fields["zone"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
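+        # (Context for the check below: proto-plus message classes keep field
+        # metadata under `.meta.fields`, while raw protobuf `*_pb2` classes
+        # expose a `DESCRIPTOR`; the absence of `DESCRIPTOR` is therefore used
+        # as the proto-plus marker.)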
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else: # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["zone"].items(): # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["zone"][field])):
+                    del request_init["zone"][field][i][subfield]
+            else:
+                del request_init["zone"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_zone(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_zone_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_zone") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_zone_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_zone") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.CreateZoneRequest.pb(service.CreateZoneRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.CreateZoneRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_zone_rest_bad_request(request_type=service.UpdateZoneRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_zone(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.UpdateZoneRequest,
+    dict,
+])
+def test_update_zone_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}}
+    request_init["zone"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'type_': 1, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'resource_spec': {'location_type': 1}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = service.UpdateZoneRequest.meta.fields["zone"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else: # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["zone"].items(): # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["zone"][field])):
+                    del request_init["zone"][field][i][subfield]
+            else:
+                del request_init["zone"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_zone(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_zone_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_zone") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_zone_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_zone") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.UpdateZoneRequest.pb(service.UpdateZoneRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.UpdateZoneRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_zone_rest_bad_request(request_type=service.DeleteZoneRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_zone(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.DeleteZoneRequest,
+    dict,
+])
+def test_delete_zone_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_zone(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_zone_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_zone") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_zone_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_zone") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.DeleteZoneRequest.pb(service.DeleteZoneRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.DeleteZoneRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.delete_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_zones_rest_bad_request(request_type=service.ListZonesRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_zones(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListZonesRequest,
+    dict,
+])
+def test_list_zones_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListZonesResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = service.ListZonesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_zones(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZonesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_zones_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zones") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zones_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_zones") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.ListZonesRequest.pb(service.ListZonesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = service.ListZonesResponse.to_json(service.ListZonesResponse())
+        req.return_value.content = return_value
+
+        request = service.ListZonesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListZonesResponse()
+        post_with_metadata.return_value = service.ListZonesResponse(), metadata
+
+        client.list_zones(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_zone_rest_bad_request(request_type=service.GetZoneRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_zone(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.GetZoneRequest,
+    dict,
+])
+def test_get_zone_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = resources.Zone(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            description='description_value',
+            state=resources.State.ACTIVE,
+            type_=resources.Zone.Type.RAW,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = resources.Zone.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_zone(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.Zone)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.uid == 'uid_value'
+    assert response.description == 'description_value'
+    assert response.state == resources.State.ACTIVE
+    assert response.type_ == resources.Zone.Type.RAW
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_zone_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_zone") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_zone_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_zone") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.GetZoneRequest.pb(service.GetZoneRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = resources.Zone.to_json(resources.Zone())
+        req.return_value.content = return_value
+
+        request = service.GetZoneRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = resources.Zone()
+        post_with_metadata.return_value = resources.Zone(), metadata
+
+        client.get_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_zone_actions_rest_bad_request(request_type=service.ListZoneActionsRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_zone_actions(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.ListZoneActionsRequest,
+    dict,
+])
+def test_list_zone_actions_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = service.ListActionsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = service.ListActionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_zone_actions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListZoneActionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_zone_actions_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zone_actions") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zone_actions_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_zone_actions") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.ListZoneActionsRequest.pb(service.ListZoneActionsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = service.ListActionsResponse.to_json(service.ListActionsResponse())
+        req.return_value.content = return_value
+
+        request = service.ListZoneActionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = service.ListActionsResponse()
+        post_with_metadata.return_value = service.ListActionsResponse(), metadata
+
+        client.list_zone_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_asset_rest_bad_request(request_type=service.CreateAssetRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_asset(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.CreateAssetRequest,
+    dict,
+])
+def test_create_asset_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+    request_init["asset"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'resource_spec': {'name': 'name_value', 'type_': 1, 'read_access_mode': 1}, 'resource_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'managed_access_identity': 'managed_access_identity_value'}, 'security_status': {'state': 1, 'message': 'message_value', 'update_time': {}}, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'discovery_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'last_run_time': {}, 'stats': {'data_items': 1051, 'data_size': 948, 'tables': 635, 'filesets': 863}, 'last_run_duration': {'seconds': 751, 'nanos': 543}}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = service.CreateAssetRequest.meta.fields["asset"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else: # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["asset"].items(): # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["asset"][field])):
+                    del request_init["asset"][field][i][subfield]
+            else:
+                del request_init["asset"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_asset(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_asset_rest_interceptors(null_interceptor):
+    transport = transports.DataplexServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+    )
+    client = DataplexServiceClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_asset") as post, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_asset_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_asset") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = service.CreateAssetRequest.pb(service.CreateAssetRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = service.CreateAssetRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_asset_rest_bad_request(request_type=service.UpdateAssetRequest):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_asset(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    service.UpdateAssetRequest,
+    dict,
+])
+def test_update_asset_rest_call_success(request_type):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}}
+    request_init["asset"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'resource_spec': {'name': 'name_value', 'type_': 1, 'read_access_mode': 1}, 'resource_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'managed_access_identity': 'managed_access_identity_value'}, 'security_status': {'state': 1, 'message': 'message_value', 'update_time': {}}, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'discovery_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'last_run_time': {}, 'stats': {'data_items': 1051, 'data_size': 948, 'tables': 635, 'filesets': 863}, 'last_run_duration': {'seconds': 751, 'nanos': 543}}}
+    # The version of a generated dependency at test runtime may differ from the version used during generation.
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = service.UpdateAssetRequest.meta.fields["asset"]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else: # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["asset"].items(): # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
+                    )
+
+    # Remove fields from the sample request which are not present in the runtime version of the dependency
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+        field = subfield_to_delete.get("field")
+        field_repeated = subfield_to_delete.get("is_repeated")
+        subfield = subfield_to_delete.get("subfield")
+        if subfield:
+            if field_repeated:
+                for i in range(0, len(request_init["asset"][field])):
+                    del request_init["asset"][field][i][subfield]
+            else:
+                del request_init["asset"][field][subfield]
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_asset(request)
+
+    # Establish that the response is the type that we expect.
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_asset_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_asset") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_asset_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_asset") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.UpdateAssetRequest.pb(service.UpdateAssetRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpdateAssetRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_asset_rest_bad_request(request_type=service.DeleteAssetRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_asset(request) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteAssetRequest, + dict, +]) +def test_delete_asset_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_asset(request)
+
+ # Establish that the response is the type that we expect.
+ assert response.operation.name == 'operations/spam'
+
+
+ @pytest.mark.parametrize("null_interceptor", [True, False])
+ def test_delete_asset_rest_interceptors(null_interceptor):
+ transport = transports.DataplexServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+ )
+ client = DataplexServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_asset") as post, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_asset_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_asset") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = service.DeleteAssetRequest.pb(service.DeleteAssetRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = service.DeleteAssetRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.delete_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+ def test_list_assets_rest_bad_request(request_type=service.ListAssetsRequest):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_assets(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListAssetsRequest.pb(service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListAssetsResponse.to_json(service.ListAssetsResponse()) + req.return_value.content = return_value + + request = service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListAssetsResponse() + post_with_metadata.return_value = service.ListAssetsResponse(), metadata + + 
client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_asset_rest_bad_request(request_type=service.GetAssetRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_asset(request) + + +@pytest.mark.parametrize("request_type", [ + service.GetAssetRequest, + dict, +]) +def test_get_asset_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = resources.Asset( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Asset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_asset(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Asset) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_asset_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_asset") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_asset_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_asset") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetAssetRequest.pb(service.GetAssetRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.Asset.to_json(resources.Asset()) + req.return_value.content = return_value + + request = service.GetAssetRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Asset() + post_with_metadata.return_value = resources.Asset(), metadata + + client.get_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_asset_actions_rest_bad_request(request_type=service.ListAssetActionsRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_asset_actions(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListAssetActionsRequest, + dict, +]) +def test_list_asset_actions_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListActionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListActionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_asset_actions(request) + + # Establish that the response is the type that we expect. 
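+ # Iterating the pager lazily fetches follow-up pages whenever
+ # next_page_token is set, e.g. (sketch): actions = list(response)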
+ assert isinstance(response, pagers.ListAssetActionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_asset_actions_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_asset_actions") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_asset_actions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_asset_actions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListAssetActionsRequest.pb(service.ListAssetActionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListActionsResponse.to_json(service.ListActionsResponse()) + req.return_value.content = return_value + + request = service.ListAssetActionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListActionsResponse() + post_with_metadata.return_value = service.ListActionsResponse(), metadata + + client.list_asset_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_task_rest_bad_request(request_type=service.CreateTaskRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_task(request) + + +@pytest.mark.parametrize("request_type", [ + service.CreateTaskRequest, + dict, +]) +def test_create_task_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request_init["task"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'state': 1, 'labels': {}, 'trigger_spec': {'type_': 1, 'start_time': {}, 'disabled': True, 'max_retries': 1187, 'schedule': 'schedule_value'}, 'execution_spec': {'args': {}, 'service_account': 'service_account_value', 'project': 'project_value', 'max_job_execution_lifetime': {'seconds': 751, 'nanos': 543}, 'kms_key': 'kms_key_value'}, 'execution_status': {'update_time': {}, 'latest_job': {'name': 'name_value', 'uid': 'uid_value', 'start_time': {}, 'end_time': {}, 'state': 1, 'retry_count': 1214, 'service': 1, 'service_job': 'service_job_value', 'message': 'message_value', 'labels': {}, 'trigger': 1, 'execution_spec': {}}}, 'spark': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'python_script_file': 'python_script_file_value', 'sql_script_file': 'sql_script_file_value', 'sql_script': 'sql_script_value', 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'infrastructure_spec': {'batch': {'executors_count': 1642, 'max_executors_count': 2063}, 'container_image': {'image': 'image_value', 'java_jars': ['java_jars_value1', 'java_jars_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}, 'vpc_network': {'network': 'network_value', 'sub_network': 'sub_network_value', 'network_tags': ['network_tags_value1', 'network_tags_value2']}}}, 'notebook': {'notebook': 'notebook_value', 'infrastructure_spec': {}, 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateTaskRequest.meta.fields["task"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["task"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["task"][field])): + del request_init["task"][field][i][subfield] + else: + del request_init["task"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_task(request) + + # Establish that the response is the type that we expect. 
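+ # As above, a sketch assertion on the operation future (assumes the
+ # `operation` property on google.api_core.operation.Operation):
+ assert response.operation.name == 'operations/spam'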
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_task_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_task") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_task_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_task") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.CreateTaskRequest.pb(service.CreateTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.CreateTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_task_rest_bad_request(request_type=service.UpdateTaskRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_task(request) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateTaskRequest, + dict, +]) +def test_update_task_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}} + request_init["task"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'state': 1, 'labels': {}, 'trigger_spec': {'type_': 1, 'start_time': {}, 'disabled': True, 'max_retries': 1187, 'schedule': 'schedule_value'}, 'execution_spec': {'args': {}, 'service_account': 'service_account_value', 'project': 'project_value', 'max_job_execution_lifetime': {'seconds': 751, 'nanos': 543}, 'kms_key': 'kms_key_value'}, 'execution_status': {'update_time': {}, 'latest_job': {'name': 'name_value', 'uid': 'uid_value', 'start_time': {}, 'end_time': {}, 'state': 1, 'retry_count': 1214, 'service': 1, 'service_job': 'service_job_value', 'message': 'message_value', 'labels': {}, 'trigger': 1, 'execution_spec': {}}}, 'spark': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'python_script_file': 'python_script_file_value', 'sql_script_file': 'sql_script_file_value', 'sql_script': 'sql_script_value', 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'infrastructure_spec': {'batch': {'executors_count': 1642, 'max_executors_count': 2063}, 'container_image': {'image': 'image_value', 'java_jars': ['java_jars_value1', 'java_jars_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}, 'vpc_network': {'network': 'network_value', 'sub_network': 'sub_network_value', 'network_tags': ['network_tags_value1', 'network_tags_value2']}}}, 'notebook': {'notebook': 'notebook_value', 'infrastructure_spec': {}, 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateTaskRequest.meta.fields["task"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["task"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["task"][field])): + del request_init["task"][field][i][subfield] + else: + del request_init["task"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_task(request) + + # Establish that the response is the type that we expect. 
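+ # Sketch assertion on the returned operation future, as in the asset tests
+ # above:
+ assert response.operation.name == 'operations/spam'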
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_task_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_task") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_task_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_task") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.UpdateTaskRequest.pb(service.UpdateTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpdateTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_task_rest_bad_request(request_type=service.DeleteTaskRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_task(request) + + +@pytest.mark.parametrize("request_type", [ + service.DeleteTaskRequest, + dict, +]) +def test_delete_task_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_task(request)
+
+ # Establish that the response is the type that we expect.
+ assert response.operation.name == 'operations/spam'
+
+
+ @pytest.mark.parametrize("null_interceptor", [True, False])
+ def test_delete_task_rest_interceptors(null_interceptor):
+ transport = transports.DataplexServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+ )
+ client = DataplexServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_task") as post, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_task_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_task") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = service.DeleteTaskRequest.pb(service.DeleteTaskRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = service.DeleteTaskRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.delete_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+ def test_list_tasks_rest_bad_request(request_type=service.ListTasksRequest):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_tasks(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListTasksRequest, + dict, +]) +def test_list_tasks_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTasksResponse( + next_page_token='next_page_token_value', + unreachable_locations=['unreachable_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_tasks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_locations == ['unreachable_locations_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tasks_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_tasks") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_tasks_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_tasks") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListTasksRequest.pb(service.ListTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListTasksResponse.to_json(service.ListTasksResponse()) + req.return_value.content = return_value + + request = service.ListTasksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListTasksResponse() + post_with_metadata.return_value = service.ListTasksResponse(), metadata + + client.list_tasks(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_task_rest_bad_request(request_type=service.GetTaskRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_task(request) + + +@pytest.mark.parametrize("request_type", [ + service.GetTaskRequest, + dict, +]) +def test_get_task_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = tasks.Task( + name='name_value', + uid='uid_value', + description='description_value', + display_name='display_name_value', + state=resources.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tasks.Task.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_task(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Task) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.state == resources.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_task_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_task") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_task_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_task") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetTaskRequest.pb(service.GetTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = tasks.Task.to_json(tasks.Task()) + req.return_value.content = return_value + + request = service.GetTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tasks.Task() + post_with_metadata.return_value = tasks.Task(), metadata + + client.get_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_jobs_rest_bad_request(request_type=service.ListJobsRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_jobs(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListJobsRequest, + dict, +]) +def test_list_jobs_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListJobsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_jobs") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_jobs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListJobsRequest.pb(service.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListJobsResponse.to_json(service.ListJobsResponse()) + req.return_value.content = return_value + + request = service.ListJobsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListJobsResponse() + post_with_metadata.return_value = service.ListJobsResponse(), metadata + + client.list_jobs(request, metadata=[("key", 
"val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_run_task_rest_bad_request(request_type=service.RunTaskRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.run_task(request) + + +@pytest.mark.parametrize("request_type", [ + service.RunTaskRequest, + dict, +]) +def test_run_task_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.RunTaskResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.RunTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.run_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.RunTaskResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_task_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_run_task") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_run_task_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_run_task") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.RunTaskRequest.pb(service.RunTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.RunTaskResponse.to_json(service.RunTaskResponse()) + req.return_value.content = return_value + + request = service.RunTaskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.RunTaskResponse() + post_with_metadata.return_value = service.RunTaskResponse(), metadata + + client.run_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_job_rest_bad_request(request_type=service.GetJobRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_job(request) + + +@pytest.mark.parametrize("request_type", [ + service.GetJobRequest, + dict, +]) +def test_get_job_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = tasks.Job( + name='name_value', + uid='uid_value', + state=tasks.Job.State.RUNNING, + retry_count=1214, + service=tasks.Job.Service.DATAPROC, + service_job='service_job_value', + message='message_value', + trigger=tasks.Job.Trigger.TASK_CONFIG, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tasks.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tasks.Job) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.state == tasks.Job.State.RUNNING + assert response.retry_count == 1214 + assert response.service == tasks.Job.Service.DATAPROC + assert response.service_job == 'service_job_value' + assert response.message == 'message_value' + assert response.trigger == tasks.Job.Trigger.TASK_CONFIG + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_job") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_job") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetJobRequest.pb(service.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = tasks.Job.to_json(tasks.Job()) + req.return_value.content = return_value + + request = service.GetJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tasks.Job() + post_with_metadata.return_value = tasks.Job(), metadata + + client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_cancel_job_rest_bad_request(request_type=service.CancelJobRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
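+ # google.api_core maps HTTP error payloads onto exception classes
+ # (roughly core_exceptions.from_http_response), so a mocked 400 status
+ # below should surface as core_exceptions.BadRequest inside pytest.raises.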
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_job(request) + + +@pytest.mark.parametrize("request_type", [ + service.CancelJobRequest, + dict, +]) +def test_cancel_job_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_job(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_job_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_cancel_job") as pre: + pre.assert_not_called() + pb_message = service.CancelJobRequest.pb(service.CancelJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = service.CancelJobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_create_environment_rest_bad_request(request_type=service.CreateEnvironmentRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_environment(request) + + +@pytest.mark.parametrize("request_type", [ + service.CreateEnvironmentRequest, + dict, +]) +def test_create_environment_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request_init["environment"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'infrastructure_spec': {'compute': {'disk_size_gb': 1261, 'node_count': 1070, 'max_node_count': 1491}, 'os_image': {'image_version': 'image_version_value', 'java_libraries': ['java_libraries_value1', 'java_libraries_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}}, 'session_spec': {'max_idle_duration': {'seconds': 751, 'nanos': 543}, 'enable_fast_startup': True}, 'session_status': {'active': True}, 'endpoints': {'notebooks': 'notebooks_value', 'sql': 'sql_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateEnvironmentRequest.meta.fields["environment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
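+ # A proto-plus message type exposes its schema as a name -> field dict
+ # via `meta.fields`, while a raw protobuf type exposes a list of
+ # FieldDescriptor objects via `DESCRIPTOR.fields`; the absence of a
+ # DESCRIPTOR attribute is what identifies a proto-plus type below.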
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["environment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["environment"][field])): + del request_init["environment"][field][i][subfield] + else: + del request_init["environment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_environment(request) + + # Establish that the response is the type that we expect. 
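+ # create_environment is a long-running operation: the REST transport is
+ # expected to wrap the operations_pb2.Operation payload in a
+ # google.api_core.operation.Operation future, which the assertion below
+ # verifies.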
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_environment_rest_interceptors(null_interceptor):
+ transport = transports.DataplexServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+ )
+ client = DataplexServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_environment") as post, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_environment_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_environment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = service.CreateEnvironmentRequest.pb(service.CreateEnvironmentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = service.CreateEnvironmentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.create_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_environment_rest_bad_request(request_type=service.UpdateEnvironmentRequest):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_environment(request) + + +@pytest.mark.parametrize("request_type", [ + service.UpdateEnvironmentRequest, + dict, +]) +def test_update_environment_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}} + request_init["environment"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'infrastructure_spec': {'compute': {'disk_size_gb': 1261, 'node_count': 1070, 'max_node_count': 1491}, 'os_image': {'image_version': 'image_version_value', 'java_libraries': ['java_libraries_value1', 'java_libraries_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}}, 'session_spec': {'max_idle_duration': {'seconds': 751, 'nanos': 543}, 'enable_fast_startup': True}, 'session_status': {'active': True}, 'endpoints': {'notebooks': 'notebooks_value', 'sql': 'sql_value'}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateEnvironmentRequest.meta.fields["environment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["environment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["environment"][field])): + del request_init["environment"][field][i][subfield] + else: + del request_init["environment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_environment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_environment_rest_interceptors(null_interceptor):
+ transport = transports.DataplexServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+ )
+ client = DataplexServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_environment") as post, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_environment_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_environment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = service.UpdateEnvironmentRequest.pb(service.UpdateEnvironmentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = service.UpdateEnvironmentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_environment_rest_bad_request(request_type=service.DeleteEnvironmentRequest):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.delete_environment(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ service.DeleteEnvironmentRequest,
+ dict,
+])
+def test_delete_environment_rest_call_success(request_type):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_environment(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_environment_rest_interceptors(null_interceptor):
+ transport = transports.DataplexServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
+ )
+ client = DataplexServiceClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_environment") as post, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_environment_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_environment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = service.DeleteEnvironmentRequest.pb(service.DeleteEnvironmentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = service.DeleteEnvironmentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.delete_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_list_environments_rest_bad_request(request_type=service.ListEnvironmentsRequest):
+ client = DataplexServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_environments(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListEnvironmentsRequest, + dict, +]) +def test_list_environments_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListEnvironmentsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_environments(request) + + # Establish that the response is the type that we expect. 
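+ # The client wraps the raw ListEnvironmentsResponse in a pager that
+ # transparently follows next_page_token; iterating it, e.g.
+ # `for environment in response: ...`, may issue further (mocked)
+ # HTTP requests while a page token remains set.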
+ assert isinstance(response, pagers.ListEnvironmentsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_environments_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_environments") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_environments_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_environments") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListEnvironmentsRequest.pb(service.ListEnvironmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListEnvironmentsResponse.to_json(service.ListEnvironmentsResponse()) + req.return_value.content = return_value + + request = service.ListEnvironmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListEnvironmentsResponse() + post_with_metadata.return_value = service.ListEnvironmentsResponse(), metadata + + client.list_environments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_environment_rest_bad_request(request_type=service.GetEnvironmentRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_environment(request) + + +@pytest.mark.parametrize("request_type", [ + service.GetEnvironmentRequest, + dict, +]) +def test_get_environment_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = analyze.Environment( + name='name_value', + display_name='display_name_value', + uid='uid_value', + description='description_value', + state=resources.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analyze.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_environment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Environment) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.description == 'description_value' + assert response.state == resources.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_environment_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_environment") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_environment_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_environment") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetEnvironmentRequest.pb(service.GetEnvironmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = analyze.Environment.to_json(analyze.Environment()) + req.return_value.content = return_value + + request = service.GetEnvironmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analyze.Environment() + post_with_metadata.return_value = analyze.Environment(), metadata + + client.get_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_sessions_rest_bad_request(request_type=service.ListSessionsRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
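+ # Note the patch target: the bad-request tests patch Session.request at
+ # the class level, while the success tests patch
+ # type(client.transport._session) so that only this client's session is
+ # affected.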
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sessions(request) + + +@pytest.mark.parametrize("request_type", [ + service.ListSessionsRequest, + dict, +]) +def test_list_sessions_rest_call_success(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = service.ListSessionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sessions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sessions_rest_interceptors(null_interceptor): + transport = transports.DataplexServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), + ) + client = DataplexServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_sessions") as post, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_sessions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListSessionsRequest.pb(service.ListSessionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListSessionsResponse.to_json(service.ListSessionsResponse()) + req.return_value.content = return_value + + request = service.ListSessionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListSessionsResponse() + post_with_metadata.return_value = service.ListSessionsResponse(), metadata + + client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
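+ # The locations/operations mixin methods use raw protobuf messages
+ # (locations_pb2, operations_pb2), so unlike the service-specific tests
+ # above there is no proto-plus .pb() conversion before serializing.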
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
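+ # DeleteOperation returns google.protobuf.Empty on the wire (the '{}'
+ # payload above); the client surfaces that as None, hence the check
+ # below.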
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lake_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lake), + '__call__') as call: + client.create_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lake_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lake), + '__call__') as call: + client.update_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lake_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lake), + '__call__') as call: + client.delete_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lakes_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lakes), + '__call__') as call: + client.list_lakes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_lake_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lake), + '__call__') as call: + client.get_lake(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetLakeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lake_actions_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lake_actions), + '__call__') as call: + client.list_lake_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListLakeActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_zone_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_zone), + '__call__') as call: + client.create_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_zone_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_zone), + '__call__') as call: + client.update_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_zone_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_zone), + '__call__') as call: + client.delete_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zones_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_zones), + '__call__') as call: + client.list_zones(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZonesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_zone_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_zone), + '__call__') as call: + client.get_zone(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetZoneRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_zone_actions_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_zone_actions), + '__call__') as call: + client.list_zone_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListZoneActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_asset_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_asset), + '__call__') as call: + client.create_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_asset_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_asset), + '__call__') as call: + client.update_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_asset_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_asset), + '__call__') as call: + client.delete_asset(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_asset_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_asset), + '__call__') as call: + client.get_asset(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetAssetRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_asset_actions_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_asset_actions), + '__call__') as call: + client.list_asset_actions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListAssetActionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_task_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + client.create_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_task_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_task), + '__call__') as call: + client.update_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_task_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + client.delete_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tasks_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + client.list_tasks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListTasksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_task_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + client.get_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_jobs_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_task_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + client.run_task(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.RunTaskRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_job_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_job), + '__call__') as call: + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CancelJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_environment_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), + '__call__') as call: + client.create_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_environment_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), + '__call__') as call: + client.update_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_environment_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), + '__call__') as call: + client.delete_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_environments_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_environments), + '__call__') as call: + client.list_environments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListEnvironmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_environment_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_environment), + '__call__') as call: + client.get_environment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetEnvironmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sessions_empty_call_rest(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListSessionsRequest() + + assert args[0] == request_msg + + +def test_dataplex_service_rest_lro_client(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataplexServiceGrpcTransport, + ) + +def test_dataplex_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataplexServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dataplex_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataplexServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
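+    # The base class only defines the interface; the concrete gRPC and REST
+    # transports override each stub, so every name listed here must raise.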
+ methods = ( + 'create_lake', + 'update_lake', + 'delete_lake', + 'list_lakes', + 'get_lake', + 'list_lake_actions', + 'create_zone', + 'update_zone', + 'delete_zone', + 'list_zones', + 'get_zone', + 'list_zone_actions', + 'create_asset', + 'update_asset', + 'delete_asset', + 'list_assets', + 'get_asset', + 'list_asset_actions', + 'create_task', + 'update_task', + 'delete_task', + 'list_tasks', + 'get_task', + 'list_jobs', + 'run_task', + 'get_job', + 'cancel_job', + 'create_environment', + 'update_environment', + 'delete_environment', + 'list_environments', + 'get_environment', + 'list_sessions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dataplex_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataplexServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_dataplex_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataplexServiceTransport() + adc.assert_called_once() + + +def test_dataplex_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataplexServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, + ], +) +def test_dataplex_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
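+    # google.auth.default is patched so the test never reads real Application
+    # Default Credentials from the surrounding environment.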
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataplexServiceGrpcTransport, + transports.DataplexServiceGrpcAsyncIOTransport, + transports.DataplexServiceRestTransport, + ], +) +def test_dataplex_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataplexServiceGrpcTransport, grpc_helpers), + (transports.DataplexServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dataplex_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) +def test_dataplex_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
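+    # client_cert_source_callback returns a (cert bytes, key bytes) pair that the
+    # transport is expected to forward to grpc.ssl_channel_credentials().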
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_dataplex_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.DataplexServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_dataplex_service_host_no_port(transport_name):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_dataplex_service_host_with_port(transport_name):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataplex.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataplex.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_dataplex_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DataplexServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DataplexServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_lake._session
+    session2 = client2.transport.create_lake._session
+    assert session1 != session2
+    session1 = client1.transport.update_lake._session
+    session2 = client2.transport.update_lake._session
+    assert session1 != session2
+    session1 = client1.transport.delete_lake._session
+    session2 = client2.transport.delete_lake._session
+    assert session1 != session2
+    session1 = client1.transport.list_lakes._session
+    session2 = client2.transport.list_lakes._session
+    assert session1 != session2
+    session1 = client1.transport.get_lake._session
+    session2 = client2.transport.get_lake._session
+    assert session1 != session2
+    session1 = client1.transport.list_lake_actions._session
+    session2 = client2.transport.list_lake_actions._session
+    assert session1 != session2
+    session1 = client1.transport.create_zone._session
+    session2 = client2.transport.create_zone._session
+    assert session1 != session2
+    session1 = client1.transport.update_zone._session
+    session2 = client2.transport.update_zone._session
+    assert session1 != session2
+    session1 = client1.transport.delete_zone._session
+    session2 = client2.transport.delete_zone._session
+    assert session1 != session2
+    session1 = client1.transport.list_zones._session
+    session2 = client2.transport.list_zones._session
+    assert session1 != session2
+    session1 = client1.transport.get_zone._session
+    session2 = client2.transport.get_zone._session
+    assert session1 != session2
+    session1 = client1.transport.list_zone_actions._session
+    session2 = client2.transport.list_zone_actions._session
+    assert session1 != session2
+    session1 = client1.transport.create_asset._session
+    session2 = client2.transport.create_asset._session
+    assert session1 != session2
+    session1 = client1.transport.update_asset._session
+    session2 = client2.transport.update_asset._session
+    assert session1 != session2
+    session1 = client1.transport.delete_asset._session
+    session2 = client2.transport.delete_asset._session
+    assert session1 != session2
+    session1 = client1.transport.list_assets._session
+    session2 = client2.transport.list_assets._session
+    assert session1 != session2
+    session1 = client1.transport.get_asset._session
+    session2 = client2.transport.get_asset._session
+    assert session1 != session2
+    session1 = client1.transport.list_asset_actions._session
+    session2 = client2.transport.list_asset_actions._session
+    assert session1 != session2
+    session1 = client1.transport.create_task._session
+    session2 = client2.transport.create_task._session
+    assert session1 != session2
+    session1 = client1.transport.update_task._session
+    session2 = client2.transport.update_task._session
+    assert session1 != session2
+    session1 = client1.transport.delete_task._session
+    session2 = client2.transport.delete_task._session
+    assert session1 != session2
+    session1 = client1.transport.list_tasks._session
+    session2 = client2.transport.list_tasks._session
+    assert session1 != session2
+    session1 = client1.transport.get_task._session
+    session2 = client2.transport.get_task._session
+    assert session1 != session2
+    session1 = client1.transport.list_jobs._session
+    session2 = client2.transport.list_jobs._session
+    assert session1 != session2
+    session1 = client1.transport.run_task._session
+    session2 = client2.transport.run_task._session
+    assert session1 != session2
+    session1 = client1.transport.get_job._session
+    session2 = client2.transport.get_job._session
+    assert session1 != session2
+    session1 = client1.transport.cancel_job._session
+    session2 = client2.transport.cancel_job._session
+    assert session1 != session2
+    session1 = client1.transport.create_environment._session
+    session2 = client2.transport.create_environment._session
+    assert session1 != session2
+    session1 = client1.transport.update_environment._session
+    session2 = client2.transport.update_environment._session
+    assert session1 != session2
+    session1 = client1.transport.delete_environment._session
+    session2 = client2.transport.delete_environment._session
+    assert session1 != session2
+    session1 = client1.transport.list_environments._session
+    session2 = client2.transport.list_environments._session
+    assert session1 != session2
+    session1 = client1.transport.get_environment._session
+    session2 = client2.transport.get_environment._session
+    assert session1 != session2
+    session1 = client1.transport.list_sessions._session
+    session2 = client2.transport.list_sessions._session
+    assert session1 != session2
+def test_dataplex_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
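+    # A caller-supplied channel must be adopted as-is: no new channel is created
+    # and no credentials are required.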
+    transport = transports.DataplexServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_dataplex_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataplexServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport])
+def test_dataplex_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
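+# Both api_mtls_endpoint and client_cert_source are deprecated, so the test body
+# asserts the DeprecationWarning (via pytest.warns) while the mTLS channel is built.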
+@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport])
+def test_dataplex_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_dataplex_service_grpc_lro_client():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_dataplex_service_grpc_lro_async_client():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_action_path():
+    project = "squid"
+    location = "clam"
+    lake = "whelk"
+    action = "octopus"
+    expected = "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, )
+    actual = DataplexServiceClient.action_path(project, location, lake, action)
+    assert expected == actual
+
+
+def test_parse_action_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "lake": "cuttlefish",
+        "action": "mussel",
+    }
+    path = DataplexServiceClient.action_path(**expected)
+
+    # Check that the path construction is reversible.
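+    # parse_action_path() applies the inverse of action_path(), so round-tripping
+    # the expected dict through both helpers must reproduce it unchanged.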
+ actual = DataplexServiceClient.parse_action_path(path) + assert expected == actual + +def test_asset_path(): + project = "winkle" + location = "nautilus" + lake = "scallop" + zone = "abalone" + asset = "squid" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) + actual = DataplexServiceClient.asset_path(project, location, lake, zone, asset) + assert expected == actual + + +def test_parse_asset_path(): + expected = { + "project": "clam", + "location": "whelk", + "lake": "octopus", + "zone": "oyster", + "asset": "nudibranch", + } + path = DataplexServiceClient.asset_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_asset_path(path) + assert expected == actual + +def test_environment_path(): + project = "cuttlefish" + location = "mussel" + lake = "winkle" + environment = "nautilus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) + actual = DataplexServiceClient.environment_path(project, location, lake, environment) + assert expected == actual + + +def test_parse_environment_path(): + expected = { + "project": "scallop", + "location": "abalone", + "lake": "squid", + "environment": "clam", + } + path = DataplexServiceClient.environment_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_environment_path(path) + assert expected == actual + +def test_job_path(): + project = "whelk" + location = "octopus" + lake = "oyster" + task = "nudibranch" + job = "cuttlefish" + expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) + actual = DataplexServiceClient.job_path(project, location, lake, task, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "mussel", + "location": "winkle", + "lake": "nautilus", + "task": "scallop", + "job": "abalone", + } + path = DataplexServiceClient.job_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_job_path(path) + assert expected == actual + +def test_lake_path(): + project = "squid" + location = "clam" + lake = "whelk" + expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) + actual = DataplexServiceClient.lake_path(project, location, lake) + assert expected == actual + + +def test_parse_lake_path(): + expected = { + "project": "octopus", + "location": "oyster", + "lake": "nudibranch", + } + path = DataplexServiceClient.lake_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataplexServiceClient.parse_lake_path(path) + assert expected == actual + +def test_session_path(): + project = "cuttlefish" + location = "mussel" + lake = "winkle" + environment = "nautilus" + session = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) + actual = DataplexServiceClient.session_path(project, location, lake, environment, session) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + "environment": "whelk", + "session": "octopus", + } + path = DataplexServiceClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_session_path(path) + assert expected == actual + +def test_task_path(): + project = "oyster" + location = "nudibranch" + lake = "cuttlefish" + task = "mussel" + expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) + actual = DataplexServiceClient.task_path(project, location, lake, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "lake": "scallop", + "task": "abalone", + } + path = DataplexServiceClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_task_path(path) + assert expected == actual + +def test_zone_path(): + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + actual = DataplexServiceClient.zone_path(project, location, lake, zone) + assert expected == actual + + +def test_parse_zone_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "zone": "mussel", + } + path = DataplexServiceClient.zone_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_zone_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataplexServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DataplexServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataplexServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DataplexServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataplexServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataplexServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DataplexServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DataplexServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DataplexServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataplexServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DataplexServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataplexServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DataplexServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
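+    # DeleteOperation maps to google.protobuf.Empty, which the client surfaces
+    # as a plain None.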
+ assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
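+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the value in an awaitable,
+    # standing in for the call object a real async gRPC stub would return.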
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
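+    # Routing is verified via metadata: the x-goog-request-params header must
+    # carry the request's name field ("name=locations").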
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataplexServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, locations_pb2.Location)
+@pytest.mark.asyncio
+async def test_get_location_async(transport: str = "grpc_asyncio"):
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+        _, _, kw = call.mock_calls[0]
+        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # (The stub patched here is get_location, matching the method under test.)
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # (The stub patched here is get_location, matching the method under test.)
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = DataplexServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = DataplexServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
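+        # Note: the context-manager protocol on the client is expected to
+        # delegate cleanup to the transport, so exiting the `with` block
+        # should trigger transport.close(), as asserted below.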
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataplexServiceClient, transports.DataplexServiceGrpcTransport), + (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py new file mode 100644 index 000000000000..e151c6f1dc97 --- /dev/null +++ b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -0,0 +1,9404 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceAsyncClient +from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient +from google.cloud.dataplex_v1.services.metadata_service import pagers +from google.cloud.dataplex_v1.services.metadata_service import transports +from google.cloud.dataplex_v1.types import metadata_ +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetadataServiceClient._get_default_mtls_endpoint(None) is None + assert MetadataServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MetadataServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetadataServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + MetadataServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetadataServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MetadataServiceClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MetadataServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MetadataServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MetadataServiceClient._get_client_cert_source(None, False) is None + assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert MetadataServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) is mock_provided_cert_source
+
+@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient))
+@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient))
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = MetadataServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    assert MetadataServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert MetadataServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert MetadataServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert MetadataServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert MetadataServiceClient._get_universe_domain(None, None) == MetadataServiceClient._DEFAULT_UNIVERSE
+
+    with pytest.raises(ValueError) as excinfo:
+        MetadataServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
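+
+# Note (illustrative, per the assertions above): the universe domain appears to
+# resolve with the precedence client option > GOOGLE_CLOUD_UNIVERSE_DOMAIN env
+# var > library default, e.g.:
+#   _get_universe_domain("foo.com", "bar.com") -> "foo.com"
+#   _get_universe_domain(None, "bar.com")      -> "bar.com"
+#   _get_universe_domain(None, None)           -> _DEFAULT_UNIVERSE ("googleapis.com")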
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetadataServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetadataServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (MetadataServiceClient, "grpc"), + (MetadataServiceAsyncClient, "grpc_asyncio"), + (MetadataServiceClient, "rest"), +]) +def test_metadata_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MetadataServiceGrpcTransport, "grpc"), + (transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MetadataServiceRestTransport, "rest"), +]) +def test_metadata_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MetadataServiceClient, "grpc"), + (MetadataServiceAsyncClient, "grpc_asyncio"), + (MetadataServiceClient, "rest"), +]) +def test_metadata_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataplex.googleapis.com' + ) + + +def test_metadata_service_client_get_transport_class(): + transport = MetadataServiceClient.get_transport_class() + available_transports = [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceRestTransport, + ] + assert transport in available_transports + + transport = MetadataServiceClient.get_transport_class("grpc") + assert transport == transports.MetadataServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest"), +]) +@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) +def test_metadata_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
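+    # Note: per the assertions in this test, "never" pins the regular endpoint
+    # and "always" forces DEFAULT_MTLS_ENDPOINT even without a client
+    # certificate; only "auto" makes the choice depend on cert availability.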
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "true"), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "false"), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", "true"), + (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(MetadataServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metadata_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
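+    # Note: with no explicit client_cert_source and no ADC default certificate,
+    # the client appears to fall back to the regular endpoint even when
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true", since there is no cert to use.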
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MetadataServiceClient, MetadataServiceAsyncClient +]) +@mock.patch.object(MetadataServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceAsyncClient)) +def test_metadata_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + MetadataServiceClient, MetadataServiceAsyncClient +]) +@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) +@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) +def test_metadata_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MetadataServiceClient._DEFAULT_UNIVERSE + default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
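+    # Note: endpoints come from _DEFAULT_ENDPOINT_TEMPLATE; with the template
+    # patched to "test.{UNIVERSE_DOMAIN}" here, a universe of "googleapis.com"
+    # would yield "test.googleapis.com" (illustrative values).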
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest"), +]) +def test_metadata_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", None), +]) +def test_metadata_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_metadata_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetadataServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_metadata_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
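+    # Note: the expected flow is roughly ClientOptions.credentials_file ->
+    # google.auth.load_credentials_from_file() -> create_channel(), so the
+    # file-based credentials (file_creds below), not the ADC result, should
+    # reach the channel.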
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + metadata_.CreateEntityRequest, + dict, +]) +def test_create_entity(request_type, transport: str = 'grpc'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + response = client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.CreateEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +def test_create_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
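+    # Note: only non-UUID string fields are populated here, so any extra field
+    # observed on the outgoing request would have to come from the library's
+    # AIP-4235 auto-population of UUID4 request fields.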
+ request = metadata_.CreateEntityRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.CreateEntityRequest( + parent='parent_value', + ) + +def test_create_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc + request = {} + client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_entity in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_entity] = mock_rpc + + request = {} + await client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreateEntityRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + # Designate an appropriate return value for the call. 
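+        # Note: the async stub must return an awaitable, so the proto response
+        # is wrapped in grpc_helpers_async.FakeUnaryUnaryCall (an awaitable
+        # test fake) rather than returned bare as in the sync tests.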
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        ))
+        response = await client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.CreateEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Entity)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.description == 'description_value'
+    assert response.id == 'id_value'
+    assert response.etag == 'etag_value'
+    assert response.type_ == metadata_.Entity.Type.TABLE
+    assert response.asset == 'asset_value'
+    assert response.data_path == 'data_path_value'
+    assert response.data_path_pattern == 'data_path_pattern_value'
+    assert response.catalog_entry == 'catalog_entry_value'
+    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
+    assert response.uid == 'uid_value'
+
+
+@pytest.mark.asyncio
+async def test_create_entity_async_from_dict():
+    await test_create_entity_async(request_type=dict)
+
+def test_create_entity_field_headers():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreateEntityRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        call.return_value = metadata_.Entity()
+        client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_entity_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreateEntityRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        await client.create_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
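+    # Routing fields that would appear in the request URI are mirrored into
+    # the x-goog-request-params metadata entry so the backend can route the
+    # call; the header travels in the `metadata` kwarg inspected below.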
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_entity_flattened():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_entity(
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entity
+        mock_val = metadata_.Entity(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_entity_flattened_error():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_entity(
+            metadata_.CreateEntityRequest(),
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_entity_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_entity(
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].entity
+        mock_val = metadata_.Entity(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_entity_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_entity(
+            metadata_.CreateEntityRequest(),
+            parent='parent_value',
+            entity=metadata_.Entity(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    metadata_.UpdateEntityRequest,
+    dict,
+])
+def test_update_entity(request_type, transport: str = 'grpc'):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
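+    # Patching __call__ on the *type* of the multicallable intercepts the
+    # invocation (special-method lookup happens on the type), so the test
+    # exercises the full client surface without touching a real channel.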
+ with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + response = client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.UpdateEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +def test_update_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.UpdateEntityRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.UpdateEntityRequest( + ) + +def test_update_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc + request = {} + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. 
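+        # The call above went through the mock installed in _wrapped_methods.
+        # A second call must reuse the same cached wrapper, so wrap_method
+        # is expected to stay uncalled from here on.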
+        assert mock_rpc.call_count == 1
+
+        client.update_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = MetadataServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_entity in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_entity] = mock_rpc
+
+        request = {}
+        await client.update_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.UpdateEntityRequest):
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        ))
+        response = await client.update_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.UpdateEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
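+    # Note: proto-plus appends a trailing underscore to field names that
+    # collide with Python builtins or keywords, which is why the proto
+    # field `type` surfaces as `type_` here.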
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.asyncio +async def test_update_entity_async_from_dict(): + await test_update_entity_async(request_type=dict) + +def test_update_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.UpdateEntityRequest() + + request.entity.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entity.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.UpdateEntityRequest() + + request.entity.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) + await client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entity.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + metadata_.DeleteEntityRequest, + dict, +]) +def test_delete_entity(request_type, transport: str = 'grpc'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.DeleteEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.DeleteEntityRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeleteEntityRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc + request = {} + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_entity in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_entity] = mock_rpc + + request = {} + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeleteEntityRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.DeleteEntityRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_entity_async_from_dict(): + await test_delete_entity_async(request_type=dict) + +def test_delete_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = None + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entity_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. 
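+        # DeleteEntity returns google.protobuf.Empty, which the generated
+        # surface maps to None; the flattened call below therefore has no
+        # meaningful payload to fabricate.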
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_entity(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_entity_flattened_error():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_entity(
+            metadata_.DeleteEntityRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_entity_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_entity(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_entity_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_entity(
+            metadata_.DeleteEntityRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    metadata_.GetEntityRequest,
+    dict,
+])
+def test_get_entity(request_type, transport: str = 'grpc'):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        )
+        response = client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.GetEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +def test_get_entity_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.GetEntityRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_entity(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.GetEntityRequest( + name='name_value', + ) + +def test_get_entity_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc + request = {} + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = MetadataServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_entity in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_entity] = mock_rpc
+
+        request = {}
+        await client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_entity(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetEntityRequest):
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
+            name='name_value',
+            display_name='display_name_value',
+            description='description_value',
+            id='id_value',
+            etag='etag_value',
+            type_=metadata_.Entity.Type.TABLE,
+            asset='asset_value',
+            data_path='data_path_value',
+            data_path_pattern='data_path_pattern_value',
+            catalog_entry='catalog_entry_value',
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+            uid='uid_value',
+        ))
+        response = await client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.GetEntityRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.asyncio +async def test_get_entity_async_from_dict(): + await test_get_entity_async(request_type=dict) + +def test_get_entity_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.GetEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.GetEntityRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) + await client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entity_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Entity() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entity( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
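+        # The flattened overload builds a GetEntityRequest from the keyword
+        # arguments; the assertions below read those values back off the
+        # request proto. Mixing a request object with flattened fields is
+        # rejected, as the *_flattened_error tests that follow verify.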
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_entity_flattened_error():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_entity(
+            metadata_.GetEntityRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_entity),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_entity(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_entity(
+            metadata_.GetEntityRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    metadata_.ListEntitiesRequest,
+    dict,
+])
+def test_list_entities(request_type, transport: str = 'grpc'):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.ListEntitiesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.ListEntitiesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEntitiesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_entities_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = metadata_.ListEntitiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_entities(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.ListEntitiesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_entities_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc + request = {} + client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entities_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_entities in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_entities] = mock_rpc + + request = {} + await client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_entities_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListEntitiesRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = metadata_.ListEntitiesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEntitiesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async_from_dict():
+    await test_list_entities_async(request_type=dict)
+
+def test_list_entities_field_headers():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.ListEntitiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        call.return_value = metadata_.ListEntitiesResponse()
+        client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_entities_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.ListEntitiesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
+        await client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_entities_flattened():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.ListEntitiesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_entities(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_entities_flattened_error():
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_entities(
+            metadata_.ListEntitiesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_entities_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_entities(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_entities_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_entities(
+            metadata_.ListEntitiesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_entities_pager(transport_name: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_entities),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metadata_.ListEntitiesResponse(
+                entities=[
+                    metadata_.Entity(),
+                    metadata_.Entity(),
+                    metadata_.Entity(),
+                ],
+                next_page_token='abc',
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[],
+                next_page_token='def',
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[
+                    metadata_.Entity(),
+                ],
+                next_page_token='ghi',
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[
+                    metadata_.Entity(),
+                    metadata_.Entity(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_entities(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, metadata_.Entity)
+                   for i in results)
+
+
+def test_list_entities_pages(transport_name: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
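+    # The pager follows next_page_token from page to page; the trailing
+    # RuntimeError queued in side_effect guards against the pager issuing
+    # more RPCs than the test enqueued pages for.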
+ with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entities(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entities_async_pager(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entities(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metadata_.Entity) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entities_async_pages(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
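+        # Iterating `.pages` on the AsyncPager yields one queued response
+        # per RPC; the raw_page tokens are compared against this queue at
+        # the end of the test.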
+ call.side_effect = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entities(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + metadata_.CreatePartitionRequest, + dict, +]) +def test_create_partition(request_type, transport: str = 'grpc'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + ) + response = client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.CreatePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +def test_create_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.CreatePartitionRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.create_partition(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.CreatePartitionRequest(
+            parent='parent_value',
+        )
+
+def test_create_partition_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = MetadataServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_partition in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc
+        request = {}
+        client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_partition(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = MetadataServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_partition in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_partition] = mock_rpc
+
+        request = {}
+        await client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_partition(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreatePartitionRequest):
+    client = MetadataServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
+            name='name_value',
+            values=['values_value'],
+            location='location_value',
+            etag='etag_value',
+        ))
+        response = await client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.CreatePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_create_partition_async_from_dict(): + await test_create_partition_async(request_type=dict) + +def test_create_partition_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.CreatePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_partition_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.CreatePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition()) + await client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_partition_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_.Partition() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_partition( + parent='parent_value', + partition=metadata_.Partition(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
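+ # (Flattened keyword arguments are copied onto a fresh
+ # CreatePartitionRequest, so the assertions below read each field back
+ # off args[0] rather than comparing whole request objects.)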
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].partition
+ mock_val = metadata_.Partition(name='name_value')
+ assert arg == mock_val
+
+
+def test_create_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_partition(
+ metadata_.CreatePartitionRequest(),
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_partition(
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].partition
+ mock_val = metadata_.Partition(name='name_value')
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_partition(
+ metadata_.CreatePartitionRequest(),
+ parent='parent_value',
+ partition=metadata_.Partition(name='name_value'),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.DeletePartitionRequest,
+ dict,
+])
+def test_delete_partition(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.DeletePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_partition_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
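+ # For reference, AIP-4235 auto-population fills unset `request_id`-style
+ # fields with UUID4 strings; a UUID4 serializes to 36 characters. A quick,
+ # self-contained illustration of the expected shape:
+ import uuid
+ assert len(str(uuid.uuid4())) == 36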
+ client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.DeletePartitionRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeletePartitionRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc + request = {} + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_partition] = mock_rpc + + request = {} + await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. 
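+ # (`mock_rpc` replaces the cached wrapped rpc, so its call count tracks
+ # invocations of the cached entry and `wrapper_fn` is not consulted again.
+ # As an AsyncMock it should be awaited exactly once per call; a sanity
+ # check, assuming standard AsyncMock semantics:)
+ assert mock_rpc.await_count == mock_rpc.call_count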
+ assert mock_rpc.call_count == 1 + + await client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeletePartitionRequest): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = metadata_.DeletePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_partition_async_from_dict(): + await test_delete_partition_async(request_type=dict) + +def test_delete_partition_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeletePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = None + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_partition_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeletePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_partition_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_partition(
+ metadata_.DeletePartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_partition(
+ metadata_.DeletePartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.GetPartitionRequest,
+ dict,
+])
+def test_get_partition(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.Partition(
+ name='name_value',
+ values=['values_value'],
+ location='location_value',
+ etag='etag_value',
+ )
+ response = client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetPartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +def test_get_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.GetPartitionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.GetPartitionRequest( + name='name_value', + ) + +def test_get_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc + request = {} + client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_partition] = mock_rpc + + request = {} + await client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.get_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetPartitionRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
+ name='name_value',
+ values=['values_value'],
+ location='location_value',
+ etag='etag_value',
+ ))
+ response = await client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.GetPartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, metadata_.Partition)
+ assert response.name == 'name_value'
+ assert response.values == ['values_value']
+ assert response.location == 'location_value'
+ assert response.etag == 'etag_value'
+
+
+@pytest.mark.asyncio
+async def test_get_partition_async_from_dict():
+ await test_get_partition_async(request_type=dict)
+
+def test_get_partition_field_headers():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.GetPartitionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ call.return_value = metadata_.Partition()
+ client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_partition_field_headers_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.GetPartitionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ await client.get_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_partition_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.Partition()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_partition_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_partition(
+ metadata_.GetPartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_partition_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_partition_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_partition(
+ metadata_.GetPartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ metadata_.ListPartitionsRequest,
+ dict,
+])
+def test_list_partitions(request_type, transport: str = 'grpc'):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListPartitionsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = metadata_.ListPartitionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartitionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_partitions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = metadata_.ListPartitionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_partitions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.ListPartitionsRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc + request = {} + client.list_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
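+ # (A plain dict is accepted here because GAPIC clients coerce dicts into
+ # the request message type, so an empty dict behaves like a
+ # default-constructed request. A sanity check of that equivalence,
+ # assuming proto-plus equality semantics:)
+ assert metadata_.ListPartitionsRequest({}) == metadata_.ListPartitionsRequest()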
+ assert mock_rpc.call_count == 1
+
+ client.list_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_partitions in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_partitions] = mock_rpc
+
+ request = {}
+ await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_partitions_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListPartitionsRequest):
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = metadata_.ListPartitionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListPartitionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_from_dict():
+ await test_list_partitions_async(request_type=dict)
+
+def test_list_partitions_field_headers():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListPartitionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ call.return_value = metadata_.ListPartitionsResponse()
+ client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_partitions_field_headers_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = metadata_.ListPartitionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
+ await client.list_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_partitions_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = metadata_.ListPartitionsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_partitions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_partitions_flattened_error():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_partitions(
+ metadata_.ListPartitionsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_partitions_flattened_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_partitions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_partitions_flattened_error_async():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_partitions(
+ metadata_.ListPartitionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_partitions_pager(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ next_page_token='abc',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[],
+ next_page_token='def',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ ],
+ next_page_token='ghi',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_partitions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, metadata_.Partition)
+ for i in results)
+
+def test_list_partitions_pages(transport_name: str = "grpc"):
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ next_page_token='abc',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[],
+ next_page_token='def',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ ],
+ next_page_token='ghi',
+ ),
+ metadata_.ListPartitionsResponse(
+ partitions=[
+ metadata_.Partition(),
+ metadata_.Partition(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_partitions(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_partitions_async_pager():
+ client = MetadataServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_partitions),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
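+ # (With an iterable `side_effect`, each successive await returns the next
+ # response, so the async pager sees four pages; the trailing RuntimeError
+ # guards against an unexpected fifth request.)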
+ call.side_effect = ( + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + metadata_.Partition(), + ], + next_page_token='abc', + ), + metadata_.ListPartitionsResponse( + partitions=[], + next_page_token='def', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + ], + next_page_token='ghi', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_partitions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metadata_.Partition) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_partitions_async_pages(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + metadata_.Partition(), + ], + next_page_token='abc', + ), + metadata_.ListPartitionsResponse( + partitions=[], + next_page_token='def', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + ], + next_page_token='ghi', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_partitions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_entity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc + + request = {} + client.create_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.create_entity(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+def test_create_entity_rest_required_fields(request_type=metadata_.CreateEntityRequest):
+ transport_class = transports.MetadataServiceRestTransport
+
+ request_init = {}
+ request_init["parent"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(json_format.MessageToJson(
+ pb_request,
+ use_integers_for_enums=False
+ ))
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entity._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ jsonified_request["parent"] = 'parent_value'
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entity._get_unset_required_fields(jsonified_request)
+ # Check that path parameters and body parameters are not mixing in.
+ assert not set(unset_fields) - set(("validate_only", ))
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "parent" in jsonified_request
+ assert jsonified_request["parent"] == 'parent_value'
+
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = metadata_.Entity()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = metadata_.Entity.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.create_entity(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_create_entity_rest_unset_required_fields():
+ transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.create_entity._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "entity", )))
+
+
+def test_create_entity_rest_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
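+ # (For the REST transport the HTTP layer is the requests.Session owned by
+ # the transport, so patching the session's `request` method intercepts the
+ # call while leaving transcoding and serialization real.)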
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + entity=metadata_.Entity(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_entity(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities" % client.transport._host, args[1]) + + +def test_create_entity_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entity( + metadata_.CreateEntityRequest(), + parent='parent_value', + entity=metadata_.Entity(name='name_value'), + ) + + +def test_update_entity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc + + request = {} + client.update_entity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entity_rest_required_fields(request_type=metadata_.UpdateEntityRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entity._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
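+ # (`transcode` normally matches the request against the method's
+ # http_options rule; stubbing it with a fixed uri/method/body keeps this
+ # test focused on query-parameter handling rather than URL routing.)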
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "put",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = metadata_.Entity.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.update_entity(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_update_entity_rest_unset_required_fields():
+ transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.update_entity._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("validateOnly", )) & set(("entity", )))
+
+
+def test_delete_entity_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_entity in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc
+
+ request = {}
+ client.delete_entity(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_entity(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+def test_delete_entity_rest_required_fields(request_type=metadata_.DeleteEntityRequest):
+ transport_class = transports.MetadataServiceRestTransport
+
+ request_init = {}
+ request_init["name"] = ""
+ request_init["etag"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(json_format.MessageToJson(
+ pb_request,
+ use_integers_for_enums=False
+ ))
+
+ # verify fields with default values are dropped
+ assert "etag" not in jsonified_request
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entity._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+ assert "etag" in jsonified_request
+ assert jsonified_request["etag"] == request_init["etag"]
+
+ jsonified_request["name"] = 'name_value'
+ jsonified_request["etag"] = 'etag_value'
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entity._get_unset_required_fields(jsonified_request)
+ # Check that path parameters and body parameters are not mixing in.
+ assert not set(unset_fields) - set(("etag", ))
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "name" in jsonified_request
+ assert jsonified_request["name"] == 'name_value'
+ assert "etag" in jsonified_request
+ assert jsonified_request["etag"] == 'etag_value'
+
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = None
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "delete",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ''
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.delete_entity(request)
+
+ expected_params = [
+ (
+ "etag",
+ "",
+ ),
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_delete_entity_rest_unset_required_fields():
+ transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.delete_entity._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("etag", )) & set(("name", "etag", )))
+
+
+def test_delete_entity_rest_flattened():
+ client = MetadataServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ name='name_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ''
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.delete_entity(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
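+ # (The flattened kwargs are merged into a DeleteEntityRequest and routed
+ # through the http rule; `req` is the patched session method, so the
+ # recorded call carries the final URL. A minimal sanity check first:)
+ assert req.called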
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}" % client.transport._host, args[1]) + + +def test_delete_entity_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entity( + metadata_.DeleteEntityRequest(), + name='name_value', + ) + + +def test_get_entity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entity in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc + + request = {} + client.get_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_entity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entity_rest_required_fields(request_type=metadata_.GetEntityRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entity._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
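+ # path_template.transcode() normally matches a request against the method's
+ # http_options rule, splitting its fields between URI path, query string,
+ # and body; stubbing it with a fixed 'v1/sample_method' result emulates that
+ # contract without requiring real values for the required fields.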
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_entity(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_entity_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_entity._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view", )) & set(("name", ))) + + +def test_get_entity_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_entity(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}" % client.transport._host, args[1]) + + +def test_get_entity_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
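+ # Mixing a fully-formed request object with flattened keyword arguments is
+ # ambiguous, so the client is expected to raise ValueError before any
+ # transport call is attempted.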
+ with pytest.raises(ValueError): + client.get_entity( + metadata_.GetEntityRequest(), + name='name_value', + ) + + +def test_list_entities_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entities in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc + + request = {} + client.list_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_entities(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entities_rest_required_fields(request_type=metadata_.ListEntitiesRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entities._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", "view", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.ListEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
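+ # With no path fields and no body in the stubbed transcode result, any
+ # populated request field would surface as a query parameter; for this
+ # defaults-only request the assertion below should therefore see just the
+ # standard '$alt=json;enum-encoding=int' parameter.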
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.ListEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_entities(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_entities_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_entities._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "view", )) & set(("parent", "view", ))) + + +def test_list_entities_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.ListEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.ListEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities" % client.transport._host, args[1]) + + +def test_list_entities_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entities( + metadata_.ListEntitiesRequest(), + parent='parent_value', + ) + + +def test_list_entities_rest_pager(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + metadata_.Entity(), + ], + next_page_token='abc', + ), + metadata_.ListEntitiesResponse( + entities=[], + next_page_token='def', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + ], + next_page_token='ghi', + ), + metadata_.ListEntitiesResponse( + entities=[ + metadata_.Entity(), + metadata_.Entity(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metadata_.ListEntitiesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + + pager = client.list_entities(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metadata_.Entity) + for i in results) + + pages = list(client.list_entities(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc + + request = {} + client.create_partition(request) + + # Establish that the underlying gRPC stub method was called. 
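+ # The second invocation below should reuse the wrapper cached by
+ # _prep_wrapped_messages: mock_rpc's call count advances while
+ # wrap_method's stays at zero, proving no per-call wrapper is built.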
+ assert mock_rpc.call_count == 1 + + client.create_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_partition_rest_required_fields(request_type=metadata_.CreatePartitionRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_partition._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.Partition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
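+ # Unlike the GET-style methods above, create_partition sends a request
+ # body, so the stubbed transcode result also carries a 'body' entry; the
+ # expected query parameters are still only the '$alt' default.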
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_partition_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "partition", ))) + + +def test_create_partition_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Partition() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + partition=metadata_.Partition(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions" % client.transport._host, args[1]) + + +def test_create_partition_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_partition( + metadata_.CreatePartitionRequest(), + parent='parent_value', + partition=metadata_.Partition(name='name_value'), + ) + + +def test_delete_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc + + request = {} + client.delete_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_partition_rest_required_fields(request_type=metadata_.DeletePartitionRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_partition._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
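+ # delete_partition returns Empty over REST, so the faked response body is
+ # an empty JSON string; with no default-valued required query field in
+ # this request, only the '$alt' parameter should be expected.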
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_partition_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_partition_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}" % client.transport._host, args[1]) + + +def test_delete_partition_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_partition( + metadata_.DeletePartitionRequest(), + name='name_value', + ) + + +def test_get_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc + + request = {} + client.get_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_partition_rest_required_fields(request_type=metadata_.GetPartitionRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.Partition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_partition_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_partition_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
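+ # Patching the session's request() intercepts the call at the HTTP layer,
+ # so the flattened arguments still flow through real request construction
+ # and URI transcoding before reaching the mock (see the path_template
+ # validation below).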
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Partition() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}" % client.transport._host, args[1]) + + +def test_get_partition_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partition( + metadata_.GetPartitionRequest(), + name='name_value', + ) + + +def test_list_partitions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc + + request = {} + client.list_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_partitions_rest_required_fields(request_type=metadata_.ListPartitionsRequest): + transport_class = transports.MetadataServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_partitions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_partitions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = metadata_.ListPartitionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.ListPartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_partitions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_partitions_rest_unset_required_fields(): + transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_partitions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_partitions_rest_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.ListPartitionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = metadata_.ListPartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_partitions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions" % client.transport._host, args[1]) + + +def test_list_partitions_rest_flattened_error(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_partitions( + metadata_.ListPartitionsRequest(), + parent='parent_value', + ) + + +def test_list_partitions_rest_pager(transport: str = 'rest'): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + metadata_.Partition(), + ], + next_page_token='abc', + ), + metadata_.ListPartitionsResponse( + partitions=[], + next_page_token='def', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + ], + next_page_token='ghi', + ), + metadata_.ListPartitionsResponse( + partitions=[ + metadata_.Partition(), + metadata_.Partition(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metadata_.ListPartitionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + + pager = client.list_partitions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metadata_.Partition) + for i in results) + + pages = list(client.list_partitions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetadataServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
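+ # A client built around a pre-constructed transport should adopt it as-is,
+ # which is why credentials, scopes, and api_key may not also be passed at
+ # the client level (see test_credentials_transport_error above).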
+ transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetadataServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetadataServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetadataServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, + transports.MetadataServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = MetadataServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.create_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.update_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.UpdateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + call.return_value = None + client.delete_entity(request=None) + + # Establish that the underlying stub method was called. 
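+ # With request=None the client should synthesize a default
+ # DeleteEntityRequest; args[0] is compared against a freshly constructed
+ # message to verify exactly that.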
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeleteEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entity_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + call.return_value = metadata_.Entity() + client.get_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entities_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + call.return_value = metadata_.ListEntitiesResponse() + client.list_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.create_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreatePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + call.return_value = None + client.delete_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeletePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partition_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + call.return_value = metadata_.Partition() + client.get_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetPartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partitions_empty_call_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + call.return_value = metadata_.ListPartitionsResponse() + client.list_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListPartitionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MetadataServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.create_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + # Designate an appropriate return value for the call. 
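+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an awaitable
+ # call object, mimicking what a real grpc.aio unary-unary stub returns, so
+ # the client call can be awaited as usual.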
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.update_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.UpdateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeleteEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entity_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + )) + await client.get_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entities_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse( + next_page_token='next_page_token_value', + )) + await client.list_entities(request=None) + + # Establish that the underlying stub method was called. 
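+ # The asyncio variants should mirror the sync gRPC tests above: a None
+ # request still reaches the stub as a default-constructed message.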
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + )) + await client.create_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreatePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeletePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partition_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + )) + await client.get_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetPartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_partitions_empty_call_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListPartitionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = MetadataServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_entity_rest_bad_request(request_type=metadata_.CreateEntityRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entity(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.CreateEntityRequest, + dict, +]) +def test_create_entity_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + request_init["entity"] = {'name': 'name_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'id': 'id_value', 'etag': 'etag_value', 'type_': 1, 'asset': 'asset_value', 'data_path': 'data_path_value', 'data_path_pattern': 'data_path_pattern_value', 'catalog_entry': 'catalog_entry_value', 'system': 1, 'format_': {'format_': 1, 'compression_format': 2, 'mime_type': 'mime_type_value', 'csv': {'encoding': 'encoding_value', 'header_rows': 1171, 'delimiter': 'delimiter_value', 'quote': 'quote_value'}, 'json': {'encoding': 'encoding_value'}, 'iceberg': {'metadata_location': 'metadata_location_value'}}, 'compatibility': {'hive_metastore': {'compatible': True, 'reason': 'reason_value'}, 'bigquery': {}}, 'access': {'read': 1}, 'uid': 'uid_value', 'schema': {'user_managed': True, 'fields': [{'name': 'name_value', 'description': 'description_value', 'type_': 1, 'mode': 1, 'fields': {}}], 'partition_fields': [{'name': 'name_value', 'type_': 1}], 'partition_style': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = metadata_.CreateEntityRequest.meta.fields["entity"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entity"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity"][field])): + del request_init["entity"][field][i][subfield] + else: + del request_init["entity"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entity(request) + + # Establish that the response is the type that we expect. 
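+    # The JSON payload mocked above is parsed back into a proto-plus Entity
+    # by the REST transport, so every field should round-trip unchanged.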
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entity_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_entity") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_entity_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_create_entity") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.CreateEntityRequest.pb(metadata_.CreateEntityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.Entity.to_json(metadata_.Entity()) + req.return_value.content = return_value + + request = metadata_.CreateEntityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.Entity() + post_with_metadata.return_value = metadata_.Entity(), metadata + + client.create_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_entity_rest_bad_request(request_type=metadata_.UpdateEntityRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'entity': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
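+    # A 400 status from the mocked session is surfaced by the transport as
+    # core_exceptions.BadRequest, which pytest.raises asserts here.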
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entity(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.UpdateEntityRequest, + dict, +]) +def test_update_entity_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'entity': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'}} + request_init["entity"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'id': 'id_value', 'etag': 'etag_value', 'type_': 1, 'asset': 'asset_value', 'data_path': 'data_path_value', 'data_path_pattern': 'data_path_pattern_value', 'catalog_entry': 'catalog_entry_value', 'system': 1, 'format_': {'format_': 1, 'compression_format': 2, 'mime_type': 'mime_type_value', 'csv': {'encoding': 'encoding_value', 'header_rows': 1171, 'delimiter': 'delimiter_value', 'quote': 'quote_value'}, 'json': {'encoding': 'encoding_value'}, 'iceberg': {'metadata_location': 'metadata_location_value'}}, 'compatibility': {'hive_metastore': {'compatible': True, 'reason': 'reason_value'}, 'bigquery': {}}, 'access': {'read': 1}, 'uid': 'uid_value', 'schema': {'user_managed': True, 'fields': [{'name': 'name_value', 'description': 'description_value', 'type_': 1, 'mode': 1, 'fields': {}}], 'partition_fields': [{'name': 'name_value', 'type_': 1}], 'partition_style': 1}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = metadata_.UpdateEntityRequest.meta.fields["entity"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
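+        # proto-plus message classes expose their fields via `meta.fields`,
+        # while raw protobuf classes expose them via `DESCRIPTOR.fields`.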
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entity"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity"][field])): + del request_init["entity"][field][i][subfield] + else: + del request_init["entity"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entity(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entity_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_update_entity") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_update_entity_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_update_entity") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.UpdateEntityRequest.pb(metadata_.UpdateEntityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.Entity.to_json(metadata_.Entity()) + req.return_value.content = return_value + + request = metadata_.UpdateEntityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.Entity() + post_with_metadata.return_value = metadata_.Entity(), metadata + + client.update_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entity_rest_bad_request(request_type=metadata_.DeleteEntityRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entity(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.DeleteEntityRequest, + dict, +]) +def test_delete_entity_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entity(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entity_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_delete_entity") as pre: + pre.assert_not_called() + pb_message = metadata_.DeleteEntityRequest.pb(metadata_.DeleteEntityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = metadata_.DeleteEntityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_entity_rest_bad_request(request_type=metadata_.GetEntityRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entity(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.GetEntityRequest, + dict, +]) +def test_get_entity_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Entity( + name='name_value', + display_name='display_name_value', + description='description_value', + id='id_value', + etag='etag_value', + type_=metadata_.Entity.Type.TABLE, + asset='asset_value', + data_path='data_path_value', + data_path_pattern='data_path_pattern_value', + catalog_entry='catalog_entry_value', + system=metadata_.StorageSystem.CLOUD_STORAGE, + uid='uid_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Entity.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_entity(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metadata_.Entity) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.id == 'id_value' + assert response.etag == 'etag_value' + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == 'asset_value' + assert response.data_path == 'data_path_value' + assert response.data_path_pattern == 'data_path_pattern_value' + assert response.catalog_entry == 'catalog_entry_value' + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE + assert response.uid == 'uid_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entity_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_entity") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_entity_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_get_entity") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.GetEntityRequest.pb(metadata_.GetEntityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.Entity.to_json(metadata_.Entity()) + req.return_value.content = return_value + + request = metadata_.GetEntityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.Entity() + post_with_metadata.return_value = metadata_.Entity(), metadata + + client.get_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entities_rest_bad_request(request_type=metadata_.ListEntitiesRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entities(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.ListEntitiesRequest, + dict, +]) +def test_list_entities_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.ListEntitiesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.ListEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entities(request) + + # Establish that the response is the type that we expect. 
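+    # List methods wrap the raw ListEntitiesResponse in a pager that can
+    # lazily fetch subsequent pages; the token set above passes through.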
+ assert isinstance(response, pagers.ListEntitiesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entities_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_entities") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_entities_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_list_entities") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.ListEntitiesRequest.pb(metadata_.ListEntitiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.ListEntitiesResponse.to_json(metadata_.ListEntitiesResponse()) + req.return_value.content = return_value + + request = metadata_.ListEntitiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.ListEntitiesResponse() + post_with_metadata.return_value = metadata_.ListEntitiesResponse(), metadata + + client.list_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_partition_rest_bad_request(request_type=metadata_.CreatePartitionRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_partition(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.CreatePartitionRequest, + dict, +]) +def test_create_partition_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request_init["partition"] = {'name': 'name_value', 'values': ['values_value1', 'values_value2'], 'location': 'location_value', 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = metadata_.CreatePartitionRequest.meta.fields["partition"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["partition"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["partition"][field])): + del request_init["partition"][field][i][subfield] + else: + del request_init["partition"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_partition(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_partition_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_partition") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_partition_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_create_partition") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.CreatePartitionRequest.pb(metadata_.CreatePartitionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.Partition.to_json(metadata_.Partition()) + req.return_value.content = return_value + + request = metadata_.CreatePartitionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.Partition() + post_with_metadata.return_value = metadata_.Partition(), metadata + + client.create_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_partition_rest_bad_request(request_type=metadata_.DeletePartitionRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_partition(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.DeletePartitionRequest, + dict, +]) +def test_delete_partition_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_partition(request) + + # Establish that the response is the type that we expect. 
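+    # Delete RPCs return google.protobuf.Empty, which the client surfaces
+    # to the caller as None rather than as a message object.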
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_partition_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_delete_partition") as pre: + pre.assert_not_called() + pb_message = metadata_.DeletePartitionRequest.pb(metadata_.DeletePartitionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = metadata_.DeletePartitionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_partition_rest_bad_request(request_type=metadata_.GetPartitionRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_partition(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.GetPartitionRequest, + dict, +]) +def test_get_partition_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = metadata_.Partition( + name='name_value', + values=['values_value'], + location='location_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.Partition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_partition(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_.Partition) + assert response.name == 'name_value' + assert response.values == ['values_value'] + assert response.location == 'location_value' + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_partition_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_partition") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_partition_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_get_partition") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.GetPartitionRequest.pb(metadata_.GetPartitionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.Partition.to_json(metadata_.Partition()) + req.return_value.content = return_value + + request = metadata_.GetPartitionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.Partition() + post_with_metadata.return_value = metadata_.Partition(), metadata + + client.get_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_partitions_rest_bad_request(request_type=metadata_.ListPartitionsRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_partitions(request) + + +@pytest.mark.parametrize("request_type", [ + metadata_.ListPartitionsRequest, + dict, +]) +def test_list_partitions_rest_call_success(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metadata_.ListPartitionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = metadata_.ListPartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_partitions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPartitionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_partitions_rest_interceptors(null_interceptor): + transport = transports.MetadataServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), + ) + client = MetadataServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_partitions") as post, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_partitions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_list_partitions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = metadata_.ListPartitionsRequest.pb(metadata_.ListPartitionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = metadata_.ListPartitionsResponse.to_json(metadata_.ListPartitionsResponse()) + req.return_value.content = return_value + + request = metadata_.ListPartitionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metadata_.ListPartitionsResponse() + post_with_metadata.return_value = metadata_.ListPartitionsResponse(), metadata + + client.list_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
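+    # The locations/operations mixin methods operate on raw protobuf
+    # messages (locations_pb2, operations_pb2) rather than proto-plus
+    # wrappers, hence the direct json_format serialization below.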
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
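+ # Note: the fake payload is serialized with json_format.MessageToJson below, + # mirroring the JSON bodies the REST transport parses in production.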
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entity_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entity), + '__call__') as call: + client.create_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entity_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entity), + '__call__') as call: + client.update_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.UpdateEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entity_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entity), + '__call__') as call: + client.delete_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeleteEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entity_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_entity), + '__call__') as call: + client.get_entity(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetEntityRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_entities_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entities), + '__call__') as call: + client.list_entities(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListEntitiesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partition_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partition), + '__call__') as call: + client.create_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.CreatePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partition_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partition), + '__call__') as call: + client.delete_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.DeletePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partition_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partition), + '__call__') as call: + client.get_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.GetPartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partitions_empty_call_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partitions), + '__call__') as call: + client.list_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = metadata_.ListPartitionsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetadataServiceGrpcTransport, + ) + +def test_metadata_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetadataServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metadata_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetadataServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_entity', + 'update_entity', + 'delete_entity', + 'get_entity', + 'list_entities', + 'create_partition', + 'delete_partition', + 'get_partition', + 'list_partitions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metadata_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetadataServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_metadata_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetadataServiceTransport() + adc.assert_called_once() + + +def test_metadata_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
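+ # Note: ADC is resolved through google.auth.default(), so patching it lets + # the test assert the exact scopes and quota project the client requests.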
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetadataServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, + ], +) +def test_metadata_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetadataServiceGrpcTransport, + transports.MetadataServiceGrpcAsyncIOTransport, + transports.MetadataServiceRestTransport, + ], +) +def test_metadata_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetadataServiceGrpcTransport, grpc_helpers), + (transports.MetadataServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metadata_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) +def test_metadata_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
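+ # Note: ready-made channel credentials should be forwarded to create_channel + # untouched; only the client_cert_source_for_mtls path further below is + # expected to build credentials via grpc.ssl_channel_credentials.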
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_metadata_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MetadataServiceRestTransport( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_metadata_service_host_no_port(transport_name): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataplex.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataplex.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_metadata_service_host_with_port(transport_name): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataplex.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataplex.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_metadata_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetadataServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MetadataServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_entity._session + session2 = client2.transport.create_entity._session + assert session1 != session2 + session1 = client1.transport.update_entity._session + session2 = client2.transport.update_entity._session + assert session1 != session2 + session1 = client1.transport.delete_entity._session + session2 = client2.transport.delete_entity._session + assert session1 != session2 + session1 = client1.transport.get_entity._session + session2 = client2.transport.get_entity._session + assert session1 != session2 + session1 =
client1.transport.list_entities._session + session2 = client2.transport.list_entities._session + assert session1 != session2 + session1 = client1.transport.create_partition._session + session2 = client2.transport.create_partition._session + assert session1 != session2 + session1 = client1.transport.delete_partition._session + session2 = client2.transport.delete_partition._session + assert session1 != session2 + session1 = client1.transport.get_partition._session + session2 = client2.transport.get_partition._session + assert session1 != session2 + session1 = client1.transport.list_partitions._session + session2 = client2.transport.list_partitions._session + assert session1 != session2 +def test_metadata_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetadataServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_metadata_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetadataServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) +def test_metadata_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) +def test_metadata_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_entity_path(): + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + entity = "oyster" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) + actual = MetadataServiceClient.entity_path(project, location, lake, zone, entity) + assert expected == actual + + +def test_parse_entity_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "lake": "mussel", + "zone": "winkle", + "entity": "nautilus", + } + path = MetadataServiceClient.entity_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_entity_path(path) + assert expected == actual + +def test_partition_path(): + project = "scallop" + location = "abalone" + lake = "squid" + zone = "clam" + entity = "whelk" + partition = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) + actual = MetadataServiceClient.partition_path(project, location, lake, zone, entity, partition) + assert expected == actual + + +def test_parse_partition_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "zone": "mussel", + "entity": "winkle", + "partition": "nautilus", + } + path = MetadataServiceClient.partition_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_partition_path(path) + assert expected == actual + +def test_zone_path(): + project = "scallop" + location = "abalone" + lake = "squid" + zone = "clam" + expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) + actual = MetadataServiceClient.zone_path(project, location, lake, zone) + assert expected == actual + + +def test_parse_zone_path(): + expected = { + "project": "whelk", + "location": "octopus", + "lake": "oyster", + "zone": "nudibranch", + } + path = MetadataServiceClient.zone_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetadataServiceClient.parse_zone_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetadataServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = MetadataServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetadataServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = MetadataServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetadataServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = MetadataServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = MetadataServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = MetadataServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetadataServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetadataServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = MetadataServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetadataServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = MetadataServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
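+ # Note: URI-bound request fields travel in the x-goog-request-params + # metadata entry, which the backend uses to route the call.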
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
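+ # Note: FakeUnaryUnaryCall is a google-api-core test helper that wraps the + # value in an awaitable, standing in for the call object the async + # transport normally returns.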
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
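+ # Note: patching __call__ on the type of the wrapped method intercepts the + # invocation itself, so no RPC is actually sent.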
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
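+ # Note: proto3 messages carry defaults for every field, so an empty + # ListLocationsRequest is still a well-formed request.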
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + +def test_get_location_from_dict(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call.
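+        # FakeUnaryUnaryCall wraps the response so the mocked async stub returns an awaitable.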
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MetadataServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MetadataServiceClient, transports.MetadataServiceGrpcTransport), + (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 3ea08b1c93669900fdbb26c524c319c58a3070ac Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 22 Sep 2025 15:49:39 +0000 Subject: [PATCH 2/3] Apply post processing for google-cloud-dataplex --- .../client-post-processing/doc-formatting.yaml | 1 + scripts/client-post-processing/doc-formatting.yaml | 14 ++++++++++++++ 2 files changed, 15 insertions(+) create mode 120000 packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml diff --git a/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git
a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 89322bf519dc..5d8c7a975ca8 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -548,3 +548,17 @@ replacements: Set empty values to clear the metadata. Refer to documentation in count: 1 + - paths: [ + packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py, + ] + before: | + Examples of using a filter are: + \ -------------------------------\n + \ ``immediate_parent="projects/\{project_id_or_number\}/locations/\{location_id\}/glossaries/\{glossary_id\}"`` + \ -------------------------------------------------------------------------------------------------------\n + \ ``immediate_parent="projects/\{project_id_or_number\}/locations/\{location_id\}/glossaries/\{glossary_id\}/categories/\{category_id\}"`` + after: | + Examples of using a filter are:\n + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + count: 2 From dc2f9a1ff50bab1c853a120ca50c274d0467361d Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 22 Sep 2025 15:57:42 +0000 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-dataplex/v1/.coveragerc | 13 - .../google-cloud-dataplex/v1/.flake8 | 34 - .../google-cloud-dataplex/v1/LICENSE | 202 - .../google-cloud-dataplex/v1/MANIFEST.in | 20 - .../google-cloud-dataplex/v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../google-cloud-dataplex/v1/docs/conf.py | 385 - .../v1/docs/dataplex_v1/catalog_service.rst | 10 - .../v1/docs/dataplex_v1/cmek_service.rst | 10 - .../v1/docs/dataplex_v1/content_service.rst | 10 - .../v1/docs/dataplex_v1/data_scan_service.rst | 10 - .../dataplex_v1/data_taxonomy_service.rst | 10 - .../v1/docs/dataplex_v1/dataplex_service.rst | 10 - .../v1/docs/dataplex_v1/metadata_service.rst | 10 - .../v1/docs/dataplex_v1/services_.rst | 13 - .../v1/docs/dataplex_v1/types_.rst | 6 - .../google-cloud-dataplex/v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../v1/google/cloud/dataplex/__init__.py | 485 - .../v1/google/cloud/dataplex/gapic_version.py | 16 - .../v1/google/cloud/dataplex/py.typed | 2 - .../v1/google/cloud/dataplex_v1/__init__.py | 486 - .../cloud/dataplex_v1/gapic_metadata.json | 2006 -- .../google/cloud/dataplex_v1/gapic_version.py | 16 - .../v1/google/cloud/dataplex_v1/py.typed | 2 - .../cloud/dataplex_v1/services/__init__.py | 15 - .../services/catalog_service/__init__.py | 22 - .../services/catalog_service/async_client.py | 4109 --- .../services/catalog_service/client.py | 4507 --- .../services/catalog_service/pagers.py | 861 - .../catalog_service/transports/README.rst | 9 - .../catalog_service/transports/__init__.py | 38 - .../catalog_service/transports/base.py | 754 - .../catalog_service/transports/grpc.py | 1220 - .../transports/grpc_asyncio.py | 1521 - .../catalog_service/transports/rest.py | 6010 ---- .../catalog_service/transports/rest_base.py | 1451 - .../services/cmek_service/__init__.py | 22 - .../services/cmek_service/async_client.py | 
1216 - .../services/cmek_service/client.py | 1592 - .../services/cmek_service/pagers.py | 166 - .../cmek_service/transports/README.rst | 9 - .../cmek_service/transports/__init__.py | 38 - .../services/cmek_service/transports/base.py | 307 - .../services/cmek_service/transports/grpc.py | 581 - .../cmek_service/transports/grpc_asyncio.py | 652 - .../services/cmek_service/transports/rest.py | 1907 -- .../cmek_service/transports/rest_base.py | 472 - .../services/content_service/__init__.py | 22 - .../services/content_service/async_client.py | 1497 - .../services/content_service/client.py | 1878 - .../services/content_service/pagers.py | 167 - .../content_service/transports/README.rst | 9 - .../content_service/transports/__init__.py | 38 - .../content_service/transports/base.py | 382 - .../content_service/transports/grpc.py | 664 - .../transports/grpc_asyncio.py | 786 - .../content_service/transports/rest.py | 2446 -- .../content_service/transports/rest_base.py | 644 - .../services/data_scan_service/__init__.py | 22 - .../data_scan_service/async_client.py | 1755 - .../services/data_scan_service/client.py | 2154 -- .../services/data_scan_service/pagers.py | 305 - .../data_scan_service/transports/README.rst | 9 - .../data_scan_service/transports/__init__.py | 38 - .../data_scan_service/transports/base.py | 363 - .../data_scan_service/transports/grpc.py | 691 - .../transports/grpc_asyncio.py | 782 - .../data_scan_service/transports/rest.py | 2620 -- .../data_scan_service/transports/rest_base.py | 645 - .../data_taxonomy_service/__init__.py | 22 - .../data_taxonomy_service/async_client.py | 2543 -- .../services/data_taxonomy_service/client.py | 2917 -- .../services/data_taxonomy_service/pagers.py | 444 - .../transports/README.rst | 9 - .../transports/__init__.py | 38 - .../data_taxonomy_service/transports/base.py | 448 - .../data_taxonomy_service/transports/grpc.py | 849 - .../transports/grpc_asyncio.py | 970 - .../data_taxonomy_service/transports/rest.py | 3660 -- .../transports/rest_base.py | 883 - .../services/dataplex_service/__init__.py | 22 - .../services/dataplex_service/async_client.py | 4716 --- .../services/dataplex_service/client.py | 5118 --- .../services/dataplex_service/pagers.py | 1420 - .../dataplex_service/transports/README.rst | 9 - .../dataplex_service/transports/__init__.py | 38 - .../dataplex_service/transports/base.py | 838 - .../dataplex_service/transports/grpc.py | 1323 - .../transports/grpc_asyncio.py | 1669 - .../dataplex_service/transports/rest.py | 6707 ---- .../dataplex_service/transports/rest_base.py | 1612 - .../services/metadata_service/__init__.py | 22 - .../services/metadata_service/async_client.py | 1571 - .../services/metadata_service/client.py | 1953 -- .../services/metadata_service/pagers.py | 305 - .../metadata_service/transports/README.rst | 9 - .../metadata_service/transports/__init__.py | 38 - .../metadata_service/transports/base.py | 394 - .../metadata_service/transports/grpc.py | 669 - .../transports/grpc_asyncio.py | 796 - .../metadata_service/transports/rest.py | 2403 -- .../metadata_service/transports/rest_base.py | 631 - .../cloud/dataplex_v1/types/__init__.py | 484 - .../google/cloud/dataplex_v1/types/analyze.py | 492 - .../google/cloud/dataplex_v1/types/catalog.py | 3079 -- .../v1/google/cloud/dataplex_v1/types/cmek.py | 356 - .../google/cloud/dataplex_v1/types/content.py | 227 - .../cloud/dataplex_v1/types/data_discovery.py | 364 - .../cloud/dataplex_v1/types/data_profile.py | 546 - .../cloud/dataplex_v1/types/data_quality.py | 962 - 
.../cloud/dataplex_v1/types/data_taxonomy.py | 972 - .../cloud/dataplex_v1/types/datascans.py | 931 - .../v1/google/cloud/dataplex_v1/types/logs.py | 1467 - .../cloud/dataplex_v1/types/metadata_.py | 1182 - .../cloud/dataplex_v1/types/processing.py | 192 - .../cloud/dataplex_v1/types/resources.py | 1436 - .../cloud/dataplex_v1/types/security.py | 90 - .../google/cloud/dataplex_v1/types/service.py | 1395 - .../google/cloud/dataplex_v1/types/tasks.py | 753 - .../google-cloud-dataplex/v1/mypy.ini | 3 - .../google-cloud-dataplex/v1/noxfile.py | 591 - ...talog_service_cancel_metadata_job_async.py | 50 - ...atalog_service_cancel_metadata_job_sync.py | 50 - ...atalog_service_create_aspect_type_async.py | 62 - ...catalog_service_create_aspect_type_sync.py | 62 - ...ated_catalog_service_create_entry_async.py | 57 - ...atalog_service_create_entry_group_async.py | 57 - ...catalog_service_create_entry_group_sync.py | 57 - ...rated_catalog_service_create_entry_sync.py | 57 - ...catalog_service_create_entry_type_async.py | 57 - ..._catalog_service_create_entry_type_sync.py | 57 - ...talog_service_create_metadata_job_async.py | 64 - ...atalog_service_create_metadata_job_sync.py | 64 - ...atalog_service_delete_aspect_type_async.py | 56 - ...catalog_service_delete_aspect_type_sync.py | 56 - ...ated_catalog_service_delete_entry_async.py | 52 - ...atalog_service_delete_entry_group_async.py | 56 - ...catalog_service_delete_entry_group_sync.py | 56 - ...rated_catalog_service_delete_entry_sync.py | 52 - ...catalog_service_delete_entry_type_async.py | 56 - ..._catalog_service_delete_entry_type_sync.py | 56 - ...d_catalog_service_get_aspect_type_async.py | 52 - ...ed_catalog_service_get_aspect_type_sync.py | 52 - ...nerated_catalog_service_get_entry_async.py | 52 - ...d_catalog_service_get_entry_group_async.py | 52 - ...ed_catalog_service_get_entry_group_sync.py | 52 - ...enerated_catalog_service_get_entry_sync.py | 52 - ...ed_catalog_service_get_entry_type_async.py | 52 - ...ted_catalog_service_get_entry_type_sync.py | 52 - ..._catalog_service_get_metadata_job_async.py | 52 - ...d_catalog_service_get_metadata_job_sync.py | 52 - ...catalog_service_list_aspect_types_async.py | 53 - ..._catalog_service_list_aspect_types_sync.py | 53 - ...ated_catalog_service_list_entries_async.py | 53 - ...rated_catalog_service_list_entries_sync.py | 53 - ...catalog_service_list_entry_groups_async.py | 53 - ..._catalog_service_list_entry_groups_sync.py | 53 - ..._catalog_service_list_entry_types_async.py | 53 - ...d_catalog_service_list_entry_types_sync.py | 53 - ...atalog_service_list_metadata_jobs_async.py | 53 - ...catalog_service_list_metadata_jobs_sync.py | 53 - ...ated_catalog_service_lookup_entry_async.py | 53 - ...rated_catalog_service_lookup_entry_sync.py | 53 - ...ed_catalog_service_search_entries_async.py | 54 - ...ted_catalog_service_search_entries_sync.py | 54 - ...atalog_service_update_aspect_type_async.py | 60 - ...catalog_service_update_aspect_type_sync.py | 60 - ...ated_catalog_service_update_entry_async.py | 55 - ...atalog_service_update_entry_group_async.py | 55 - ...catalog_service_update_entry_group_sync.py | 55 - ...rated_catalog_service_update_entry_sync.py | 55 - ...catalog_service_update_entry_type_async.py | 55 - ..._catalog_service_update_entry_type_sync.py | 55 - ..._service_create_encryption_config_async.py | 57 - ...k_service_create_encryption_config_sync.py | 57 - ..._service_delete_encryption_config_async.py | 56 - ...k_service_delete_encryption_config_sync.py | 56 - 
...mek_service_get_encryption_config_async.py | 52 - ...cmek_service_get_encryption_config_sync.py | 52 - ...k_service_list_encryption_configs_async.py | 53 - ...ek_service_list_encryption_configs_sync.py | 53 - ..._service_update_encryption_config_async.py | 55 - ...k_service_update_encryption_config_sync.py | 55 - ...ed_content_service_create_content_async.py | 58 - ...ted_content_service_create_content_sync.py | 58 - ...ed_content_service_delete_content_async.py | 50 - ...ted_content_service_delete_content_sync.py | 50 - ...rated_content_service_get_content_async.py | 52 - ...erated_content_service_get_content_sync.py | 52 - ...ed_content_service_get_iam_policy_async.py | 53 - ...ted_content_service_get_iam_policy_sync.py | 53 - ...ated_content_service_list_content_async.py | 53 - ...rated_content_service_list_content_sync.py | 53 - ...ed_content_service_set_iam_policy_async.py | 53 - ...ted_content_service_set_iam_policy_sync.py | 53 - ...tent_service_test_iam_permissions_async.py | 54 - ...ntent_service_test_iam_permissions_sync.py | 54 - ...ed_content_service_update_content_async.py | 57 - ...ted_content_service_update_content_sync.py | 57 - ...ata_scan_service_create_data_scan_async.py | 62 - ...data_scan_service_create_data_scan_sync.py | 62 - ...ata_scan_service_delete_data_scan_async.py | 56 - ...data_scan_service_delete_data_scan_sync.py | 56 - ...rvice_generate_data_quality_rules_async.py | 52 - ...ervice_generate_data_quality_rules_sync.py | 52 - ...d_data_scan_service_get_data_scan_async.py | 52 - ...ta_scan_service_get_data_scan_job_async.py | 52 - ...ata_scan_service_get_data_scan_job_sync.py | 52 - ...ed_data_scan_service_get_data_scan_sync.py | 52 - ..._scan_service_list_data_scan_jobs_async.py | 53 - ...a_scan_service_list_data_scan_jobs_sync.py | 53 - ...data_scan_service_list_data_scans_async.py | 53 - ..._data_scan_service_list_data_scans_sync.py | 53 - ...d_data_scan_service_run_data_scan_async.py | 52 - ...ed_data_scan_service_run_data_scan_sync.py | 52 - ...ata_scan_service_update_data_scan_async.py | 60 - ...data_scan_service_update_data_scan_sync.py | 60 - ...omy_service_create_data_attribute_async.py | 57 - ...ice_create_data_attribute_binding_async.py | 61 - ...vice_create_data_attribute_binding_sync.py | 61 - ...nomy_service_create_data_attribute_sync.py | 57 - ...nomy_service_create_data_taxonomy_async.py | 57 - ...onomy_service_create_data_taxonomy_sync.py | 57 - ...omy_service_delete_data_attribute_async.py | 56 - ...ice_delete_data_attribute_binding_async.py | 57 - ...vice_delete_data_attribute_binding_sync.py | 57 - ...nomy_service_delete_data_attribute_sync.py | 56 - ...nomy_service_delete_data_taxonomy_async.py | 56 - ...onomy_service_delete_data_taxonomy_sync.py | 56 - ...xonomy_service_get_data_attribute_async.py | 52 - ...ervice_get_data_attribute_binding_async.py | 52 - ...service_get_data_attribute_binding_sync.py | 52 - ...axonomy_service_get_data_attribute_sync.py | 52 - ...axonomy_service_get_data_taxonomy_async.py | 52 - ...taxonomy_service_get_data_taxonomy_sync.py | 52 - ...vice_list_data_attribute_bindings_async.py | 53 - ...rvice_list_data_attribute_bindings_sync.py | 53 - ...nomy_service_list_data_attributes_async.py | 53 - ...onomy_service_list_data_attributes_sync.py | 53 - ...nomy_service_list_data_taxonomies_async.py | 53 - ...onomy_service_list_data_taxonomies_sync.py | 53 - ...omy_service_update_data_attribute_async.py | 55 - ...ice_update_data_attribute_binding_async.py | 59 - ...vice_update_data_attribute_binding_sync.py | 59 - 
...nomy_service_update_data_attribute_sync.py | 55 - ...nomy_service_update_data_taxonomy_async.py | 55 - ...onomy_service_update_data_taxonomy_sync.py | 55 - ...rated_dataplex_service_cancel_job_async.py | 50 - ...erated_dataplex_service_cancel_job_sync.py | 50 - ...ted_dataplex_service_create_asset_async.py | 61 - ...ated_dataplex_service_create_asset_sync.py | 61 - ...taplex_service_create_environment_async.py | 61 - ...ataplex_service_create_environment_sync.py | 61 - ...ated_dataplex_service_create_lake_async.py | 57 - ...rated_dataplex_service_create_lake_sync.py | 57 - ...ated_dataplex_service_create_task_async.py | 64 - ...rated_dataplex_service_create_task_sync.py | 64 - ...ated_dataplex_service_create_zone_async.py | 62 - ...rated_dataplex_service_create_zone_sync.py | 62 - ...ted_dataplex_service_delete_asset_async.py | 56 - ...ated_dataplex_service_delete_asset_sync.py | 56 - ...taplex_service_delete_environment_async.py | 56 - ...ataplex_service_delete_environment_sync.py | 56 - ...ated_dataplex_service_delete_lake_async.py | 56 - ...rated_dataplex_service_delete_lake_sync.py | 56 - ...ated_dataplex_service_delete_task_async.py | 56 - ...rated_dataplex_service_delete_task_sync.py | 56 - ...ated_dataplex_service_delete_zone_async.py | 56 - ...rated_dataplex_service_delete_zone_sync.py | 56 - ...erated_dataplex_service_get_asset_async.py | 52 - ...nerated_dataplex_service_get_asset_sync.py | 52 - ..._dataplex_service_get_environment_async.py | 52 - ...d_dataplex_service_get_environment_sync.py | 52 - ...enerated_dataplex_service_get_job_async.py | 52 - ...generated_dataplex_service_get_job_sync.py | 52 - ...nerated_dataplex_service_get_lake_async.py | 52 - ...enerated_dataplex_service_get_lake_sync.py | 52 - ...nerated_dataplex_service_get_task_async.py | 52 - ...enerated_dataplex_service_get_task_sync.py | 52 - ...nerated_dataplex_service_get_zone_async.py | 52 - ...enerated_dataplex_service_get_zone_sync.py | 52 - ...taplex_service_list_asset_actions_async.py | 53 - ...ataplex_service_list_asset_actions_sync.py | 53 - ...ated_dataplex_service_list_assets_async.py | 53 - ...rated_dataplex_service_list_assets_sync.py | 53 - ...ataplex_service_list_environments_async.py | 53 - ...dataplex_service_list_environments_sync.py | 53 - ...erated_dataplex_service_list_jobs_async.py | 53 - ...nerated_dataplex_service_list_jobs_sync.py | 53 - ...ataplex_service_list_lake_actions_async.py | 53 - ...dataplex_service_list_lake_actions_sync.py | 53 - ...rated_dataplex_service_list_lakes_async.py | 53 - ...erated_dataplex_service_list_lakes_sync.py | 53 - ...ed_dataplex_service_list_sessions_async.py | 53 - ...ted_dataplex_service_list_sessions_sync.py | 53 - ...rated_dataplex_service_list_tasks_async.py | 53 - ...erated_dataplex_service_list_tasks_sync.py | 53 - ...ataplex_service_list_zone_actions_async.py | 53 - ...dataplex_service_list_zone_actions_sync.py | 53 - ...rated_dataplex_service_list_zones_async.py | 53 - ...erated_dataplex_service_list_zones_sync.py | 53 - ...nerated_dataplex_service_run_task_async.py | 52 - ...enerated_dataplex_service_run_task_sync.py | 52 - ...ted_dataplex_service_update_asset_async.py | 59 - ...ated_dataplex_service_update_asset_sync.py | 59 - ...taplex_service_update_environment_async.py | 59 - ...ataplex_service_update_environment_sync.py | 59 - ...ated_dataplex_service_update_lake_async.py | 55 - ...rated_dataplex_service_update_lake_sync.py | 55 - ...ated_dataplex_service_update_task_async.py | 62 - ...rated_dataplex_service_update_task_sync.py | 62 - 
...ated_dataplex_service_update_zone_async.py | 60 - ...rated_dataplex_service_update_zone_sync.py | 60 - ...ed_metadata_service_create_entity_async.py | 62 - ...ted_metadata_service_create_entity_sync.py | 62 - ...metadata_service_create_partition_async.py | 57 - ..._metadata_service_create_partition_sync.py | 57 - ...ed_metadata_service_delete_entity_async.py | 51 - ...ted_metadata_service_delete_entity_sync.py | 51 - ...metadata_service_delete_partition_async.py | 50 - ..._metadata_service_delete_partition_sync.py | 50 - ...rated_metadata_service_get_entity_async.py | 52 - ...erated_metadata_service_get_entity_sync.py | 52 - ...ed_metadata_service_get_partition_async.py | 52 - ...ted_metadata_service_get_partition_sync.py | 52 - ...ed_metadata_service_list_entities_async.py | 54 - ...ted_metadata_service_list_entities_sync.py | 54 - ..._metadata_service_list_partitions_async.py | 53 - ...d_metadata_service_list_partitions_sync.py | 53 - ...ed_metadata_service_update_entity_async.py | 61 - ...ted_metadata_service_update_entity_sync.py | 61 - ...pet_metadata_google.cloud.dataplex.v1.json | 20224 ----------- .../v1/scripts/fixup_dataplex_v1_keywords.py | 298 - .../google-cloud-dataplex/v1/setup.py | 99 - .../v1/testing/constraints-3.10.txt | 7 - .../v1/testing/constraints-3.11.txt | 7 - .../v1/testing/constraints-3.12.txt | 7 - .../v1/testing/constraints-3.13.txt | 12 - .../v1/testing/constraints-3.7.txt | 11 - .../v1/testing/constraints-3.8.txt | 7 - .../v1/testing/constraints-3.9.txt | 7 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/dataplex_v1/__init__.py | 16 - .../gapic/dataplex_v1/test_catalog_service.py | 24562 -------------- .../gapic/dataplex_v1/test_cmek_service.py | 6432 ---- .../gapic/dataplex_v1/test_content_service.py | 8115 ----- .../dataplex_v1/test_data_scan_service.py | 9435 ------ .../dataplex_v1/test_data_taxonomy_service.py | 14068 -------- .../dataplex_v1/test_dataplex_service.py | 28263 ---------------- .../dataplex_v1/test_metadata_service.py | 9404 ----- .../dataplex_v1/business_glossary_service.rst | 0 .../docs/dataplex_v1/services_.rst | 1 + .../google/cloud/dataplex/__init__.py | 64 + .../google/cloud/dataplex_v1/__init__.py | 60 + .../cloud/dataplex_v1/gapic_metadata.json | 289 + .../business_glossary_service/__init__.py | 6 +- .../business_glossary_service/async_client.py | 776 +- .../business_glossary_service/client.py | 902 +- .../business_glossary_service/pagers.py | 195 +- .../transports/README.rst | 0 .../transports/__init__.py | 27 +- .../transports/base.py | 323 +- .../transports/grpc.py | 359 +- .../transports/grpc_asyncio.py | 382 +- .../transports/rest.py | 2560 +- .../transports/rest_base.py | 729 +- .../services/catalog_service/async_client.py | 385 +- .../services/catalog_service/client.py | 418 +- .../catalog_service/transports/base.py | 42 + .../catalog_service/transports/grpc.py | 88 +- .../transports/grpc_asyncio.py | 103 +- .../catalog_service/transports/rest.py | 640 +- .../catalog_service/transports/rest_base.py | 155 +- .../services/cmek_service/async_client.py | 4 +- .../services/cmek_service/client.py | 4 +- .../services/cmek_service/transports/grpc.py | 3 +- .../cmek_service/transports/grpc_asyncio.py | 3 +- .../services/cmek_service/transports/rest.py | 5 +- .../cmek_service/transports/rest_base.py | 2 +- .../services/content_service/async_client.py | 4 +- .../services/content_service/client.py | 4 +- .../content_service/transports/grpc.py | 3 +- 
.../transports/grpc_asyncio.py | 3 +- .../content_service/transports/rest.py | 3 +- .../content_service/transports/rest_base.py | 2 +- .../data_scan_service/async_client.py | 24 +- .../services/data_scan_service/client.py | 24 +- .../data_scan_service/transports/rest.py | 2 +- .../data_scan_service/transports/rest_base.py | 2 +- .../data_taxonomy_service/async_client.py | 2 +- .../services/data_taxonomy_service/client.py | 2 +- .../data_taxonomy_service/transports/rest.py | 2 +- .../transports/rest_base.py | 2 +- .../services/dataplex_service/async_client.py | 4 +- .../services/dataplex_service/client.py | 4 +- .../dataplex_service/transports/rest.py | 2 +- .../dataplex_service/transports/rest_base.py | 2 +- .../metadata_service/transports/rest_base.py | 2 +- .../cloud/dataplex_v1/types/__init__.py | 54 + .../google/cloud/dataplex_v1/types/analyze.py | 3 +- .../dataplex_v1/types/business_glossary.py | 98 +- .../google/cloud/dataplex_v1/types/catalog.py | 329 +- .../cloud/dataplex_v1/types/data_discovery.py | 9 + .../cloud/dataplex_v1/types/data_profile.py | 186 +- .../cloud/dataplex_v1/types/data_quality.py | 53 +- .../cloud/dataplex_v1/types/data_taxonomy.py | 2 +- .../cloud/dataplex_v1/types/datascans.py | 29 +- .../dataplex_v1/types/datascans_common.py | 6 +- .../google/cloud/dataplex_v1/types/logs.py | 19 +- .../cloud/dataplex_v1/types/metadata_.py | 21 +- .../cloud/dataplex_v1/types/processing.py | 20 +- .../google/cloud/dataplex_v1/types/service.py | 4 +- .../google/cloud/dataplex_v1/types/tasks.py | 8 +- ..._glossary_service_create_glossary_async.py | 0 ..._service_create_glossary_category_async.py | 0 ...y_service_create_glossary_category_sync.py | 0 ...s_glossary_service_create_glossary_sync.py | 0 ...sary_service_create_glossary_term_async.py | 0 ...ssary_service_create_glossary_term_sync.py | 0 ..._glossary_service_delete_glossary_async.py | 0 ..._service_delete_glossary_category_async.py | 0 ...y_service_delete_glossary_category_sync.py | 0 ...s_glossary_service_delete_glossary_sync.py | 0 ...sary_service_delete_glossary_term_async.py | 0 ...ssary_service_delete_glossary_term_sync.py | 0 ...ess_glossary_service_get_glossary_async.py | 0 ...ary_service_get_glossary_category_async.py | 0 ...sary_service_get_glossary_category_sync.py | 0 ...ness_glossary_service_get_glossary_sync.py | 0 ...lossary_service_get_glossary_term_async.py | 0 ...glossary_service_get_glossary_term_sync.py | 0 ..._glossary_service_list_glossaries_async.py | 0 ...s_glossary_service_list_glossaries_sync.py | 0 ..._service_list_glossary_categories_async.py | 0 ...y_service_list_glossary_categories_sync.py | 0 ...ssary_service_list_glossary_terms_async.py | 0 ...ossary_service_list_glossary_terms_sync.py | 0 ..._glossary_service_update_glossary_async.py | 0 ..._service_update_glossary_category_async.py | 0 ...y_service_update_glossary_category_sync.py | 0 ...s_glossary_service_update_glossary_sync.py | 0 ...sary_service_update_glossary_term_async.py | 0 ...ssary_service_update_glossary_term_sync.py | 0 ...catalog_service_create_entry_link_async.py | 0 ..._catalog_service_create_entry_link_sync.py | 0 ...catalog_service_delete_entry_link_async.py | 0 ..._catalog_service_delete_entry_link_sync.py | 0 ...ed_catalog_service_get_entry_link_async.py | 0 ...ted_catalog_service_get_entry_link_sync.py | 0 ...pet_metadata_google.cloud.dataplex.v1.json | 3230 +- .../scripts/fixup_dataplex_v1_keywords.py | 18 + .../test_business_glossary_service.py | 7495 ++-- .../gapic/dataplex_v1/test_catalog_service.py | 10845 +++--- 
.../dataplex_v1/test_data_scan_service.py | 25 +- 458 files changed, 21245 insertions(+), 252325 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/.flake8 delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/LICENSE delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py delete 
mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py delete 
mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py delete mode 
100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py delete 
mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py delete mode 100644 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/setup.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py delete mode 100644 owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/docs/dataplex_v1/business_glossary_service.rst (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py (90%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py (82%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/client.py (82%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py (77%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py (59%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py (63%) rename {owl-bot-staging/google-cloud-dataplex/v1 => 
packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py (78%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py (79%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py (66%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py (53%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/types/business_glossary.py (91%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/google/cloud/dataplex_v1/types/datascans_common.py (95%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => 
packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => 
packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py (100%) rename {owl-bot-staging/google-cloud-dataplex/v1 => packages/google-cloud-dataplex}/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py (71%) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc b/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc deleted file mode 100644 index 8df508b38cbc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dataplex/__init__.py - google/cloud/dataplex/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 b/owl-bot-staging/google-cloud-dataplex/v1/.flake8 deleted file mode 100644 index 90316de21489..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/LICENSE b/owl-bot-staging/google-cloud-dataplex/v1/LICENSE deleted file mode 100644 index d64569567334..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in b/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in deleted file mode 100644 index dae249ec8976..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -include README.rst LICENSE -recursive-include google *.py *.pyi *.json *.proto py.typed -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/owl-bot-staging/google-cloud-dataplex/v1/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/README.rst deleted file mode 100644 index 7b2028d76ab9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Dataplex API -================================================= - -Quick Start ----------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dataplex API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
- - Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud import dataplex_v1 - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud import dataplex_v1 - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
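Taken together, the propagation and handler notes in the README above amount to the short sketch below. This is an illustration rather than part of the generated README; it assumes the package is installed, that Application Default Credentials are configured, and it uses the real `CatalogServiceClient` from this package:

.. code-block:: python

    import logging

    # Send every record that reaches the root logger to stderr.
    logging.basicConfig(level=logging.DEBUG)

    # Per the "Logging details" section, the "google" logger does not
    # propagate to the root logger by default; opt in explicitly so the
    # handler installed by basicConfig above receives the events.
    logging.getLogger("google").propagate = True

    from google.cloud import dataplex_v1

    # RPC logging events from this client (logging.DEBUG or higher)
    # now surface through the root handler.
    client = dataplex_v1.CatalogServiceClient()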
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css deleted file mode 100644 index b0a295464b23..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html b/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcfe1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- <div class="document"> - {{ sidebar() }} - {%- block document %} - <div class="documentwrapper"> - {%- if render_sidebar %} - <div class="bodywrapper"> - {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - <div class="related top"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - - <div class="body" role="main"> - <div id="python2-eol"> - As of January 1, 2020 this library no longer supports Python 2 on the latest released version. - Library versions released prior to that date will continue to be available. For more information please - visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. - </div> - {% block body %} {% endblock %} - </div> - - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - <div class="related bottom"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} - </div> - {%- endif %} - </div> - {%- endblock %} - </div> - <div class="clearer"></div>
-{%- else %}
-{{ super() }}
-{%- endif %}
-{%- endblock %}
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py b/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py
deleted file mode 100644
index 5128564a815e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/conf.py
+++ /dev/null
@@ -1,385 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-dataplex documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-# For plugins that can not read conf.py.
-# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
-sys.path.insert(0, os.path.abspath("."))
-
-__version__ = ""
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.5.0"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.doctest",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-    "recommonmark",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_options = {"members": True}
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-# source_suffix = ['.rst', '.md']
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-dataplex"
-copyright = u"2025, Google, LLC"
-author = u"Google APIs"
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = [
-    "_build",
-    "**/.nox/**/*",
-    "samples/AUTHORING_GUIDE.md",
-    "samples/CONTRIBUTING.md",
-    "samples/snippets/README.rst",
-]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for google-cloud-dataplex",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-dataplex-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warning, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-dataplex.tex",
-        u"google-cloud-dataplex Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-dataplex",
-        "google-cloud-dataplex Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-    (
-        root_doc,
-        "google-cloud-dataplex",
-        "google-cloud-dataplex Documentation",
-        author,
-        "google-cloud-dataplex",
-        "google-cloud-dataplex Library",
-        "APIs",
-    )
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
-
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
-    "python": ("https://python.readthedocs.org/en/latest/", None),
-    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
-    "google.api_core": (
-        "https://googleapis.dev/python/google-api-core/latest/",
-        None,
-    ),
-    "grpc": ("https://grpc.github.io/grpc/python/", None),
-    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
-    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
-}
-
-
-# Napoleon settings
-napoleon_google_docstring = True
-napoleon_numpy_docstring = True
-napoleon_include_private_with_doc = False
-napoleon_include_special_with_doc = True
-napoleon_use_admonition_for_examples = False
-napoleon_use_admonition_for_notes = False
-napoleon_use_admonition_for_references = False
-napoleon_use_ivar = False
-napoleon_use_param = True
-napoleon_use_rtype = True
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst
deleted file mode 100644
index ef6306fadb87..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/catalog_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-CatalogService
---------------------------------
-
-.. automodule:: google.cloud.dataplex_v1.services.catalog_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.catalog_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst
deleted file mode 100644
index 5eae398d0f87..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/cmek_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-CmekService
------------------------------ 
-
-.. automodule:: google.cloud.dataplex_v1.services.cmek_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.cmek_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst
deleted file mode 100644
index ce3774365501..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/content_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-ContentService
---------------------------------
-
-.. automodule:: google.cloud.dataplex_v1.services.content_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.content_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst
deleted file mode 100644
index c9281cda5823..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_scan_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-DataScanService
---------------------------------- 
-
-.. automodule:: google.cloud.dataplex_v1.services.data_scan_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.data_scan_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst
deleted file mode 100644
index b2a185a3c43f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/data_taxonomy_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-DataTaxonomyService
-------------------------------------- 
-
-.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst
deleted file mode 100644
index 5ecb20ccef96..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/dataplex_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-DataplexService
--------------------------------- 
-
-.. automodule:: google.cloud.dataplex_v1.services.dataplex_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.dataplex_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst
deleted file mode 100644
index d5bf19660ab5..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/metadata_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-MetadataService
--------------------------------- 
-
-.. automodule:: google.cloud.dataplex_v1.services.metadata_service
-    :members:
-    :inherited-members:
-
-.. automodule:: google.cloud.dataplex_v1.services.metadata_service.pagers
-    :members:
-    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst
deleted file mode 100644
index 4f97a5efe7f1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/services_.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-Services for Google Cloud Dataplex v1 API
-=========================================
-.. toctree::
-    :maxdepth: 2
-
-    business_glossary_service
-    catalog_service
-    cmek_service
-    content_service
-    dataplex_service
-    data_scan_service
-    data_taxonomy_service
-    metadata_service
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst
deleted file mode 100644
index 391acd51ef80..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/types_.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Types for Google Cloud Dataplex v1 API
-======================================
-
-.. automodule:: google.cloud.dataplex_v1.types
-    :members:
-    :show-inheritance:
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst
deleted file mode 100644
index 03a62592ed5f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/index.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-.. include:: multiprocessing.rst
-
-
-API Reference
--------------
-.. toctree::
-    :maxdepth: 2
-
-    dataplex_v1/services_
-    dataplex_v1/types_
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst b/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst
deleted file mode 100644
index 536d17b2ea65..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/docs/multiprocessing.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-.. note::
-
-   Because this client uses :mod:`grpc` library, it is safe to
-   share instances across threads. In multiprocessing scenarios, the best
-   practice is to create client instances *after* the invocation of
-   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
-   :class:`multiprocessing.Process`.
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py
deleted file mode 100644
index 4a17145224d8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/__init__.py
+++ /dev/null
@@ -1,485 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
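[The multiprocessing note deleted above is easiest to see in code. A minimal sketch of the fork-then-create pattern it recommends; the worker function and lake name are illustrative placeholders, not part of this patch:]

    import multiprocessing

    from google.cloud import dataplex_v1


    def worker(lake_name: str) -> None:
        # Build the client *after* the fork, inside the child process, so no
        # gRPC channel state is shared across the process boundary.
        client = dataplex_v1.DataplexServiceClient()
        for zone in client.list_zones(parent=lake_name):
            print(zone.name)


    if __name__ == "__main__":
        # multiprocessing.Process forks first; each child then creates its own client.
        process = multiprocessing.Process(
            target=worker,
            args=("projects/my-project/locations/us-central1/lakes/my-lake",),
        )
        process.start()
        process.join()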
-# -from google.cloud.dataplex import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dataplex_v1.services.business_glossary_service.client import BusinessGlossaryServiceClient -from google.cloud.dataplex_v1.services.business_glossary_service.async_client import BusinessGlossaryServiceAsyncClient -from google.cloud.dataplex_v1.services.catalog_service.client import CatalogServiceClient -from google.cloud.dataplex_v1.services.catalog_service.async_client import CatalogServiceAsyncClient -from google.cloud.dataplex_v1.services.cmek_service.client import CmekServiceClient -from google.cloud.dataplex_v1.services.cmek_service.async_client import CmekServiceAsyncClient -from google.cloud.dataplex_v1.services.content_service.client import ContentServiceClient -from google.cloud.dataplex_v1.services.content_service.async_client import ContentServiceAsyncClient -from google.cloud.dataplex_v1.services.dataplex_service.client import DataplexServiceClient -from google.cloud.dataplex_v1.services.dataplex_service.async_client import DataplexServiceAsyncClient -from google.cloud.dataplex_v1.services.data_scan_service.client import DataScanServiceClient -from google.cloud.dataplex_v1.services.data_scan_service.async_client import DataScanServiceAsyncClient -from google.cloud.dataplex_v1.services.data_taxonomy_service.client import DataTaxonomyServiceClient -from google.cloud.dataplex_v1.services.data_taxonomy_service.async_client import DataTaxonomyServiceAsyncClient -from google.cloud.dataplex_v1.services.metadata_service.client import MetadataServiceClient -from google.cloud.dataplex_v1.services.metadata_service.async_client import MetadataServiceAsyncClient - -from google.cloud.dataplex_v1.types.analyze import Content -from google.cloud.dataplex_v1.types.analyze import Environment -from google.cloud.dataplex_v1.types.analyze import Session -from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryCategoryRequest -from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryRequest -from google.cloud.dataplex_v1.types.business_glossary import CreateGlossaryTermRequest -from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryCategoryRequest -from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryRequest -from google.cloud.dataplex_v1.types.business_glossary import DeleteGlossaryTermRequest -from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryCategoryRequest -from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryRequest -from google.cloud.dataplex_v1.types.business_glossary import GetGlossaryTermRequest -from google.cloud.dataplex_v1.types.business_glossary import Glossary -from google.cloud.dataplex_v1.types.business_glossary import GlossaryCategory -from google.cloud.dataplex_v1.types.business_glossary import GlossaryTerm -from google.cloud.dataplex_v1.types.business_glossary import ListGlossariesRequest -from google.cloud.dataplex_v1.types.business_glossary import ListGlossariesResponse -from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryCategoriesRequest -from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryCategoriesResponse -from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryTermsRequest -from google.cloud.dataplex_v1.types.business_glossary import ListGlossaryTermsResponse -from google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryCategoryRequest -from 
google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryRequest -from google.cloud.dataplex_v1.types.business_glossary import UpdateGlossaryTermRequest -from google.cloud.dataplex_v1.types.catalog import Aspect -from google.cloud.dataplex_v1.types.catalog import AspectSource -from google.cloud.dataplex_v1.types.catalog import AspectType -from google.cloud.dataplex_v1.types.catalog import CancelMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import CreateAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryLinkRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryRequest -from google.cloud.dataplex_v1.types.catalog import CreateEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import CreateMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import DeleteAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryLinkRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryRequest -from google.cloud.dataplex_v1.types.catalog import DeleteEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import Entry -from google.cloud.dataplex_v1.types.catalog import EntryGroup -from google.cloud.dataplex_v1.types.catalog import EntryLink -from google.cloud.dataplex_v1.types.catalog import EntrySource -from google.cloud.dataplex_v1.types.catalog import EntryType -from google.cloud.dataplex_v1.types.catalog import GetAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryLinkRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryRequest -from google.cloud.dataplex_v1.types.catalog import GetEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import GetMetadataJobRequest -from google.cloud.dataplex_v1.types.catalog import ImportItem -from google.cloud.dataplex_v1.types.catalog import ListAspectTypesRequest -from google.cloud.dataplex_v1.types.catalog import ListAspectTypesResponse -from google.cloud.dataplex_v1.types.catalog import ListEntriesRequest -from google.cloud.dataplex_v1.types.catalog import ListEntriesResponse -from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsRequest -from google.cloud.dataplex_v1.types.catalog import ListEntryGroupsResponse -from google.cloud.dataplex_v1.types.catalog import ListEntryTypesRequest -from google.cloud.dataplex_v1.types.catalog import ListEntryTypesResponse -from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsRequest -from google.cloud.dataplex_v1.types.catalog import ListMetadataJobsResponse -from google.cloud.dataplex_v1.types.catalog import LookupEntryRequest -from google.cloud.dataplex_v1.types.catalog import MetadataJob -from google.cloud.dataplex_v1.types.catalog import SearchEntriesRequest -from google.cloud.dataplex_v1.types.catalog import SearchEntriesResponse -from google.cloud.dataplex_v1.types.catalog import SearchEntriesResult -from google.cloud.dataplex_v1.types.catalog import UpdateAspectTypeRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryGroupRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryRequest -from google.cloud.dataplex_v1.types.catalog import UpdateEntryTypeRequest -from google.cloud.dataplex_v1.types.catalog import EntryView -from 
google.cloud.dataplex_v1.types.catalog import TransferStatus -from google.cloud.dataplex_v1.types.cmek import CreateEncryptionConfigRequest -from google.cloud.dataplex_v1.types.cmek import DeleteEncryptionConfigRequest -from google.cloud.dataplex_v1.types.cmek import EncryptionConfig -from google.cloud.dataplex_v1.types.cmek import GetEncryptionConfigRequest -from google.cloud.dataplex_v1.types.cmek import ListEncryptionConfigsRequest -from google.cloud.dataplex_v1.types.cmek import ListEncryptionConfigsResponse -from google.cloud.dataplex_v1.types.cmek import UpdateEncryptionConfigRequest -from google.cloud.dataplex_v1.types.content import CreateContentRequest -from google.cloud.dataplex_v1.types.content import DeleteContentRequest -from google.cloud.dataplex_v1.types.content import GetContentRequest -from google.cloud.dataplex_v1.types.content import ListContentRequest -from google.cloud.dataplex_v1.types.content import ListContentResponse -from google.cloud.dataplex_v1.types.content import UpdateContentRequest -from google.cloud.dataplex_v1.types.data_discovery import DataDiscoveryResult -from google.cloud.dataplex_v1.types.data_discovery import DataDiscoverySpec -from google.cloud.dataplex_v1.types.data_profile import DataProfileResult -from google.cloud.dataplex_v1.types.data_profile import DataProfileSpec -from google.cloud.dataplex_v1.types.data_quality import DataQualityColumnResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityDimension -from google.cloud.dataplex_v1.types.data_quality import DataQualityDimensionResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityResult -from google.cloud.dataplex_v1.types.data_quality import DataQualityRule -from google.cloud.dataplex_v1.types.data_quality import DataQualityRuleResult -from google.cloud.dataplex_v1.types.data_quality import DataQualitySpec -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import CreateDataTaxonomyRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DataAttribute -from google.cloud.dataplex_v1.types.data_taxonomy import DataAttributeBinding -from google.cloud.dataplex_v1.types.data_taxonomy import DataTaxonomy -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import DeleteDataTaxonomyRequest -from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import GetDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import GetDataTaxonomyRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributeBindingsResponse -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataAttributesResponse -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesRequest -from google.cloud.dataplex_v1.types.data_taxonomy import ListDataTaxonomiesResponse -from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataAttributeBindingRequest -from google.cloud.dataplex_v1.types.data_taxonomy import 
UpdateDataAttributeRequest -from google.cloud.dataplex_v1.types.data_taxonomy import UpdateDataTaxonomyRequest -from google.cloud.dataplex_v1.types.datascans import CreateDataScanRequest -from google.cloud.dataplex_v1.types.datascans import DataScan -from google.cloud.dataplex_v1.types.datascans import DataScanJob -from google.cloud.dataplex_v1.types.datascans import DeleteDataScanRequest -from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesRequest -from google.cloud.dataplex_v1.types.datascans import GenerateDataQualityRulesResponse -from google.cloud.dataplex_v1.types.datascans import GetDataScanJobRequest -from google.cloud.dataplex_v1.types.datascans import GetDataScanRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScanJobsResponse -from google.cloud.dataplex_v1.types.datascans import ListDataScansRequest -from google.cloud.dataplex_v1.types.datascans import ListDataScansResponse -from google.cloud.dataplex_v1.types.datascans import RunDataScanRequest -from google.cloud.dataplex_v1.types.datascans import RunDataScanResponse -from google.cloud.dataplex_v1.types.datascans import UpdateDataScanRequest -from google.cloud.dataplex_v1.types.datascans import DataScanType -from google.cloud.dataplex_v1.types.datascans_common import DataScanCatalogPublishingStatus -from google.cloud.dataplex_v1.types.logs import BusinessGlossaryEvent -from google.cloud.dataplex_v1.types.logs import DataQualityScanRuleResult -from google.cloud.dataplex_v1.types.logs import DataScanEvent -from google.cloud.dataplex_v1.types.logs import DiscoveryEvent -from google.cloud.dataplex_v1.types.logs import EntryLinkEvent -from google.cloud.dataplex_v1.types.logs import GovernanceEvent -from google.cloud.dataplex_v1.types.logs import JobEvent -from google.cloud.dataplex_v1.types.logs import SessionEvent -from google.cloud.dataplex_v1.types.metadata_ import CreateEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import CreatePartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import DeleteEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import DeletePartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import Entity -from google.cloud.dataplex_v1.types.metadata_ import GetEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import GetPartitionRequest -from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesRequest -from google.cloud.dataplex_v1.types.metadata_ import ListEntitiesResponse -from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsRequest -from google.cloud.dataplex_v1.types.metadata_ import ListPartitionsResponse -from google.cloud.dataplex_v1.types.metadata_ import Partition -from google.cloud.dataplex_v1.types.metadata_ import Schema -from google.cloud.dataplex_v1.types.metadata_ import StorageAccess -from google.cloud.dataplex_v1.types.metadata_ import StorageFormat -from google.cloud.dataplex_v1.types.metadata_ import UpdateEntityRequest -from google.cloud.dataplex_v1.types.metadata_ import StorageSystem -from google.cloud.dataplex_v1.types.processing import DataSource -from google.cloud.dataplex_v1.types.processing import ScannedData -from google.cloud.dataplex_v1.types.processing import Trigger -from google.cloud.dataplex_v1.types.resources import Action -from google.cloud.dataplex_v1.types.resources import Asset -from google.cloud.dataplex_v1.types.resources import AssetStatus -from 
google.cloud.dataplex_v1.types.resources import Lake -from google.cloud.dataplex_v1.types.resources import Zone -from google.cloud.dataplex_v1.types.resources import State -from google.cloud.dataplex_v1.types.security import DataAccessSpec -from google.cloud.dataplex_v1.types.security import ResourceAccessSpec -from google.cloud.dataplex_v1.types.service import CancelJobRequest -from google.cloud.dataplex_v1.types.service import CreateAssetRequest -from google.cloud.dataplex_v1.types.service import CreateEnvironmentRequest -from google.cloud.dataplex_v1.types.service import CreateLakeRequest -from google.cloud.dataplex_v1.types.service import CreateTaskRequest -from google.cloud.dataplex_v1.types.service import CreateZoneRequest -from google.cloud.dataplex_v1.types.service import DeleteAssetRequest -from google.cloud.dataplex_v1.types.service import DeleteEnvironmentRequest -from google.cloud.dataplex_v1.types.service import DeleteLakeRequest -from google.cloud.dataplex_v1.types.service import DeleteTaskRequest -from google.cloud.dataplex_v1.types.service import DeleteZoneRequest -from google.cloud.dataplex_v1.types.service import GetAssetRequest -from google.cloud.dataplex_v1.types.service import GetEnvironmentRequest -from google.cloud.dataplex_v1.types.service import GetJobRequest -from google.cloud.dataplex_v1.types.service import GetLakeRequest -from google.cloud.dataplex_v1.types.service import GetTaskRequest -from google.cloud.dataplex_v1.types.service import GetZoneRequest -from google.cloud.dataplex_v1.types.service import ListActionsResponse -from google.cloud.dataplex_v1.types.service import ListAssetActionsRequest -from google.cloud.dataplex_v1.types.service import ListAssetsRequest -from google.cloud.dataplex_v1.types.service import ListAssetsResponse -from google.cloud.dataplex_v1.types.service import ListEnvironmentsRequest -from google.cloud.dataplex_v1.types.service import ListEnvironmentsResponse -from google.cloud.dataplex_v1.types.service import ListJobsRequest -from google.cloud.dataplex_v1.types.service import ListJobsResponse -from google.cloud.dataplex_v1.types.service import ListLakeActionsRequest -from google.cloud.dataplex_v1.types.service import ListLakesRequest -from google.cloud.dataplex_v1.types.service import ListLakesResponse -from google.cloud.dataplex_v1.types.service import ListSessionsRequest -from google.cloud.dataplex_v1.types.service import ListSessionsResponse -from google.cloud.dataplex_v1.types.service import ListTasksRequest -from google.cloud.dataplex_v1.types.service import ListTasksResponse -from google.cloud.dataplex_v1.types.service import ListZoneActionsRequest -from google.cloud.dataplex_v1.types.service import ListZonesRequest -from google.cloud.dataplex_v1.types.service import ListZonesResponse -from google.cloud.dataplex_v1.types.service import OperationMetadata -from google.cloud.dataplex_v1.types.service import RunTaskRequest -from google.cloud.dataplex_v1.types.service import RunTaskResponse -from google.cloud.dataplex_v1.types.service import UpdateAssetRequest -from google.cloud.dataplex_v1.types.service import UpdateEnvironmentRequest -from google.cloud.dataplex_v1.types.service import UpdateLakeRequest -from google.cloud.dataplex_v1.types.service import UpdateTaskRequest -from google.cloud.dataplex_v1.types.service import UpdateZoneRequest -from google.cloud.dataplex_v1.types.tasks import Job -from google.cloud.dataplex_v1.types.tasks import Task - -__all__ = ('BusinessGlossaryServiceClient', - 
'BusinessGlossaryServiceAsyncClient', - 'CatalogServiceClient', - 'CatalogServiceAsyncClient', - 'CmekServiceClient', - 'CmekServiceAsyncClient', - 'ContentServiceClient', - 'ContentServiceAsyncClient', - 'DataplexServiceClient', - 'DataplexServiceAsyncClient', - 'DataScanServiceClient', - 'DataScanServiceAsyncClient', - 'DataTaxonomyServiceClient', - 'DataTaxonomyServiceAsyncClient', - 'MetadataServiceClient', - 'MetadataServiceAsyncClient', - 'Content', - 'Environment', - 'Session', - 'CreateGlossaryCategoryRequest', - 'CreateGlossaryRequest', - 'CreateGlossaryTermRequest', - 'DeleteGlossaryCategoryRequest', - 'DeleteGlossaryRequest', - 'DeleteGlossaryTermRequest', - 'GetGlossaryCategoryRequest', - 'GetGlossaryRequest', - 'GetGlossaryTermRequest', - 'Glossary', - 'GlossaryCategory', - 'GlossaryTerm', - 'ListGlossariesRequest', - 'ListGlossariesResponse', - 'ListGlossaryCategoriesRequest', - 'ListGlossaryCategoriesResponse', - 'ListGlossaryTermsRequest', - 'ListGlossaryTermsResponse', - 'UpdateGlossaryCategoryRequest', - 'UpdateGlossaryRequest', - 'UpdateGlossaryTermRequest', - 'Aspect', - 'AspectSource', - 'AspectType', - 'CancelMetadataJobRequest', - 'CreateAspectTypeRequest', - 'CreateEntryGroupRequest', - 'CreateEntryLinkRequest', - 'CreateEntryRequest', - 'CreateEntryTypeRequest', - 'CreateMetadataJobRequest', - 'DeleteAspectTypeRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryLinkRequest', - 'DeleteEntryRequest', - 'DeleteEntryTypeRequest', - 'Entry', - 'EntryGroup', - 'EntryLink', - 'EntrySource', - 'EntryType', - 'GetAspectTypeRequest', - 'GetEntryGroupRequest', - 'GetEntryLinkRequest', - 'GetEntryRequest', - 'GetEntryTypeRequest', - 'GetMetadataJobRequest', - 'ImportItem', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'LookupEntryRequest', - 'MetadataJob', - 'SearchEntriesRequest', - 'SearchEntriesResponse', - 'SearchEntriesResult', - 'UpdateAspectTypeRequest', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateEntryTypeRequest', - 'EntryView', - 'TransferStatus', - 'CreateEncryptionConfigRequest', - 'DeleteEncryptionConfigRequest', - 'EncryptionConfig', - 'GetEncryptionConfigRequest', - 'ListEncryptionConfigsRequest', - 'ListEncryptionConfigsResponse', - 'UpdateEncryptionConfigRequest', - 'CreateContentRequest', - 'DeleteContentRequest', - 'GetContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'UpdateContentRequest', - 'DataDiscoveryResult', - 'DataDiscoverySpec', - 'DataProfileResult', - 'DataProfileSpec', - 'DataQualityColumnResult', - 'DataQualityDimension', - 'DataQualityDimensionResult', - 'DataQualityResult', - 'DataQualityRule', - 'DataQualityRuleResult', - 'DataQualitySpec', - 'CreateDataAttributeBindingRequest', - 'CreateDataAttributeRequest', - 'CreateDataTaxonomyRequest', - 'DataAttribute', - 'DataAttributeBinding', - 'DataTaxonomy', - 'DeleteDataAttributeBindingRequest', - 'DeleteDataAttributeRequest', - 'DeleteDataTaxonomyRequest', - 'GetDataAttributeBindingRequest', - 'GetDataAttributeRequest', - 'GetDataTaxonomyRequest', - 'ListDataAttributeBindingsRequest', - 'ListDataAttributeBindingsResponse', - 'ListDataAttributesRequest', - 'ListDataAttributesResponse', - 'ListDataTaxonomiesRequest', - 'ListDataTaxonomiesResponse', - 'UpdateDataAttributeBindingRequest', - 'UpdateDataAttributeRequest', - 
'UpdateDataTaxonomyRequest', - 'CreateDataScanRequest', - 'DataScan', - 'DataScanJob', - 'DeleteDataScanRequest', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'GetDataScanJobRequest', - 'GetDataScanRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 'UpdateDataScanRequest', - 'DataScanType', - 'DataScanCatalogPublishingStatus', - 'BusinessGlossaryEvent', - 'DataQualityScanRuleResult', - 'DataScanEvent', - 'DiscoveryEvent', - 'EntryLinkEvent', - 'GovernanceEvent', - 'JobEvent', - 'SessionEvent', - 'CreateEntityRequest', - 'CreatePartitionRequest', - 'DeleteEntityRequest', - 'DeletePartitionRequest', - 'Entity', - 'GetEntityRequest', - 'GetPartitionRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'ListPartitionsRequest', - 'ListPartitionsResponse', - 'Partition', - 'Schema', - 'StorageAccess', - 'StorageFormat', - 'UpdateEntityRequest', - 'StorageSystem', - 'DataSource', - 'ScannedData', - 'Trigger', - 'Action', - 'Asset', - 'AssetStatus', - 'Lake', - 'Zone', - 'State', - 'DataAccessSpec', - 'ResourceAccessSpec', - 'CancelJobRequest', - 'CreateAssetRequest', - 'CreateEnvironmentRequest', - 'CreateLakeRequest', - 'CreateTaskRequest', - 'CreateZoneRequest', - 'DeleteAssetRequest', - 'DeleteEnvironmentRequest', - 'DeleteLakeRequest', - 'DeleteTaskRequest', - 'DeleteZoneRequest', - 'GetAssetRequest', - 'GetEnvironmentRequest', - 'GetJobRequest', - 'GetLakeRequest', - 'GetTaskRequest', - 'GetZoneRequest', - 'ListActionsResponse', - 'ListAssetActionsRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListLakeActionsRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'ListZoneActionsRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'OperationMetadata', - 'RunTaskRequest', - 'RunTaskResponse', - 'UpdateAssetRequest', - 'UpdateEnvironmentRequest', - 'UpdateLakeRequest', - 'UpdateTaskRequest', - 'UpdateZoneRequest', - 'Job', - 'Task', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
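[The package surface above now exports the new BusinessGlossaryService client alongside the existing services. A minimal usage sketch, assuming glossaries already exist under the location; the project and location identifiers are placeholders, not taken from this diff:]

    from google.cloud import dataplex_v1

    # Placeholder resource name; glossaries live under a project/location.
    parent = "projects/my-project/locations/global"

    client = dataplex_v1.BusinessGlossaryServiceClient()

    # ListGlossaries maps to list_glossaries (see gapic_metadata.json below).
    for glossary in client.list_glossaries(parent=parent):
        print(glossary.name)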
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed deleted file mode 100644 index c932c263028e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataplex package uses inline types. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py deleted file mode 100644 index 72503aaffdf6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/__init__.py +++ /dev/null @@ -1,486 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dataplex_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.business_glossary_service import BusinessGlossaryServiceClient -from .services.business_glossary_service import BusinessGlossaryServiceAsyncClient -from .services.catalog_service import CatalogServiceClient -from .services.catalog_service import CatalogServiceAsyncClient -from .services.cmek_service import CmekServiceClient -from .services.cmek_service import CmekServiceAsyncClient -from .services.content_service import ContentServiceClient -from .services.content_service import ContentServiceAsyncClient -from .services.dataplex_service import DataplexServiceClient -from .services.dataplex_service import DataplexServiceAsyncClient -from .services.data_scan_service import DataScanServiceClient -from .services.data_scan_service import DataScanServiceAsyncClient -from .services.data_taxonomy_service import DataTaxonomyServiceClient -from .services.data_taxonomy_service import DataTaxonomyServiceAsyncClient -from .services.metadata_service import MetadataServiceClient -from .services.metadata_service import MetadataServiceAsyncClient - -from .types.analyze import Content -from .types.analyze import Environment -from .types.analyze import Session -from .types.business_glossary import CreateGlossaryCategoryRequest -from .types.business_glossary import CreateGlossaryRequest -from .types.business_glossary import CreateGlossaryTermRequest -from .types.business_glossary import DeleteGlossaryCategoryRequest -from .types.business_glossary import DeleteGlossaryRequest -from .types.business_glossary import DeleteGlossaryTermRequest -from .types.business_glossary import GetGlossaryCategoryRequest -from .types.business_glossary import GetGlossaryRequest -from .types.business_glossary import GetGlossaryTermRequest -from .types.business_glossary import Glossary -from .types.business_glossary import GlossaryCategory -from .types.business_glossary import GlossaryTerm -from .types.business_glossary import ListGlossariesRequest -from .types.business_glossary import 
ListGlossariesResponse -from .types.business_glossary import ListGlossaryCategoriesRequest -from .types.business_glossary import ListGlossaryCategoriesResponse -from .types.business_glossary import ListGlossaryTermsRequest -from .types.business_glossary import ListGlossaryTermsResponse -from .types.business_glossary import UpdateGlossaryCategoryRequest -from .types.business_glossary import UpdateGlossaryRequest -from .types.business_glossary import UpdateGlossaryTermRequest -from .types.catalog import Aspect -from .types.catalog import AspectSource -from .types.catalog import AspectType -from .types.catalog import CancelMetadataJobRequest -from .types.catalog import CreateAspectTypeRequest -from .types.catalog import CreateEntryGroupRequest -from .types.catalog import CreateEntryLinkRequest -from .types.catalog import CreateEntryRequest -from .types.catalog import CreateEntryTypeRequest -from .types.catalog import CreateMetadataJobRequest -from .types.catalog import DeleteAspectTypeRequest -from .types.catalog import DeleteEntryGroupRequest -from .types.catalog import DeleteEntryLinkRequest -from .types.catalog import DeleteEntryRequest -from .types.catalog import DeleteEntryTypeRequest -from .types.catalog import Entry -from .types.catalog import EntryGroup -from .types.catalog import EntryLink -from .types.catalog import EntrySource -from .types.catalog import EntryType -from .types.catalog import GetAspectTypeRequest -from .types.catalog import GetEntryGroupRequest -from .types.catalog import GetEntryLinkRequest -from .types.catalog import GetEntryRequest -from .types.catalog import GetEntryTypeRequest -from .types.catalog import GetMetadataJobRequest -from .types.catalog import ImportItem -from .types.catalog import ListAspectTypesRequest -from .types.catalog import ListAspectTypesResponse -from .types.catalog import ListEntriesRequest -from .types.catalog import ListEntriesResponse -from .types.catalog import ListEntryGroupsRequest -from .types.catalog import ListEntryGroupsResponse -from .types.catalog import ListEntryTypesRequest -from .types.catalog import ListEntryTypesResponse -from .types.catalog import ListMetadataJobsRequest -from .types.catalog import ListMetadataJobsResponse -from .types.catalog import LookupEntryRequest -from .types.catalog import MetadataJob -from .types.catalog import SearchEntriesRequest -from .types.catalog import SearchEntriesResponse -from .types.catalog import SearchEntriesResult -from .types.catalog import UpdateAspectTypeRequest -from .types.catalog import UpdateEntryGroupRequest -from .types.catalog import UpdateEntryRequest -from .types.catalog import UpdateEntryTypeRequest -from .types.catalog import EntryView -from .types.catalog import TransferStatus -from .types.cmek import CreateEncryptionConfigRequest -from .types.cmek import DeleteEncryptionConfigRequest -from .types.cmek import EncryptionConfig -from .types.cmek import GetEncryptionConfigRequest -from .types.cmek import ListEncryptionConfigsRequest -from .types.cmek import ListEncryptionConfigsResponse -from .types.cmek import UpdateEncryptionConfigRequest -from .types.content import CreateContentRequest -from .types.content import DeleteContentRequest -from .types.content import GetContentRequest -from .types.content import ListContentRequest -from .types.content import ListContentResponse -from .types.content import UpdateContentRequest -from .types.data_discovery import DataDiscoveryResult -from .types.data_discovery import DataDiscoverySpec -from .types.data_profile import 
DataProfileResult -from .types.data_profile import DataProfileSpec -from .types.data_quality import DataQualityColumnResult -from .types.data_quality import DataQualityDimension -from .types.data_quality import DataQualityDimensionResult -from .types.data_quality import DataQualityResult -from .types.data_quality import DataQualityRule -from .types.data_quality import DataQualityRuleResult -from .types.data_quality import DataQualitySpec -from .types.data_taxonomy import CreateDataAttributeBindingRequest -from .types.data_taxonomy import CreateDataAttributeRequest -from .types.data_taxonomy import CreateDataTaxonomyRequest -from .types.data_taxonomy import DataAttribute -from .types.data_taxonomy import DataAttributeBinding -from .types.data_taxonomy import DataTaxonomy -from .types.data_taxonomy import DeleteDataAttributeBindingRequest -from .types.data_taxonomy import DeleteDataAttributeRequest -from .types.data_taxonomy import DeleteDataTaxonomyRequest -from .types.data_taxonomy import GetDataAttributeBindingRequest -from .types.data_taxonomy import GetDataAttributeRequest -from .types.data_taxonomy import GetDataTaxonomyRequest -from .types.data_taxonomy import ListDataAttributeBindingsRequest -from .types.data_taxonomy import ListDataAttributeBindingsResponse -from .types.data_taxonomy import ListDataAttributesRequest -from .types.data_taxonomy import ListDataAttributesResponse -from .types.data_taxonomy import ListDataTaxonomiesRequest -from .types.data_taxonomy import ListDataTaxonomiesResponse -from .types.data_taxonomy import UpdateDataAttributeBindingRequest -from .types.data_taxonomy import UpdateDataAttributeRequest -from .types.data_taxonomy import UpdateDataTaxonomyRequest -from .types.datascans import CreateDataScanRequest -from .types.datascans import DataScan -from .types.datascans import DataScanJob -from .types.datascans import DeleteDataScanRequest -from .types.datascans import GenerateDataQualityRulesRequest -from .types.datascans import GenerateDataQualityRulesResponse -from .types.datascans import GetDataScanJobRequest -from .types.datascans import GetDataScanRequest -from .types.datascans import ListDataScanJobsRequest -from .types.datascans import ListDataScanJobsResponse -from .types.datascans import ListDataScansRequest -from .types.datascans import ListDataScansResponse -from .types.datascans import RunDataScanRequest -from .types.datascans import RunDataScanResponse -from .types.datascans import UpdateDataScanRequest -from .types.datascans import DataScanType -from .types.datascans_common import DataScanCatalogPublishingStatus -from .types.logs import BusinessGlossaryEvent -from .types.logs import DataQualityScanRuleResult -from .types.logs import DataScanEvent -from .types.logs import DiscoveryEvent -from .types.logs import EntryLinkEvent -from .types.logs import GovernanceEvent -from .types.logs import JobEvent -from .types.logs import SessionEvent -from .types.metadata_ import CreateEntityRequest -from .types.metadata_ import CreatePartitionRequest -from .types.metadata_ import DeleteEntityRequest -from .types.metadata_ import DeletePartitionRequest -from .types.metadata_ import Entity -from .types.metadata_ import GetEntityRequest -from .types.metadata_ import GetPartitionRequest -from .types.metadata_ import ListEntitiesRequest -from .types.metadata_ import ListEntitiesResponse -from .types.metadata_ import ListPartitionsRequest -from .types.metadata_ import ListPartitionsResponse -from .types.metadata_ import Partition -from .types.metadata_ import Schema 
-from .types.metadata_ import StorageAccess -from .types.metadata_ import StorageFormat -from .types.metadata_ import UpdateEntityRequest -from .types.metadata_ import StorageSystem -from .types.processing import DataSource -from .types.processing import ScannedData -from .types.processing import Trigger -from .types.resources import Action -from .types.resources import Asset -from .types.resources import AssetStatus -from .types.resources import Lake -from .types.resources import Zone -from .types.resources import State -from .types.security import DataAccessSpec -from .types.security import ResourceAccessSpec -from .types.service import CancelJobRequest -from .types.service import CreateAssetRequest -from .types.service import CreateEnvironmentRequest -from .types.service import CreateLakeRequest -from .types.service import CreateTaskRequest -from .types.service import CreateZoneRequest -from .types.service import DeleteAssetRequest -from .types.service import DeleteEnvironmentRequest -from .types.service import DeleteLakeRequest -from .types.service import DeleteTaskRequest -from .types.service import DeleteZoneRequest -from .types.service import GetAssetRequest -from .types.service import GetEnvironmentRequest -from .types.service import GetJobRequest -from .types.service import GetLakeRequest -from .types.service import GetTaskRequest -from .types.service import GetZoneRequest -from .types.service import ListActionsResponse -from .types.service import ListAssetActionsRequest -from .types.service import ListAssetsRequest -from .types.service import ListAssetsResponse -from .types.service import ListEnvironmentsRequest -from .types.service import ListEnvironmentsResponse -from .types.service import ListJobsRequest -from .types.service import ListJobsResponse -from .types.service import ListLakeActionsRequest -from .types.service import ListLakesRequest -from .types.service import ListLakesResponse -from .types.service import ListSessionsRequest -from .types.service import ListSessionsResponse -from .types.service import ListTasksRequest -from .types.service import ListTasksResponse -from .types.service import ListZoneActionsRequest -from .types.service import ListZonesRequest -from .types.service import ListZonesResponse -from .types.service import OperationMetadata -from .types.service import RunTaskRequest -from .types.service import RunTaskResponse -from .types.service import UpdateAssetRequest -from .types.service import UpdateEnvironmentRequest -from .types.service import UpdateLakeRequest -from .types.service import UpdateTaskRequest -from .types.service import UpdateZoneRequest -from .types.tasks import Job -from .types.tasks import Task - -__all__ = ( - 'BusinessGlossaryServiceAsyncClient', - 'CatalogServiceAsyncClient', - 'CmekServiceAsyncClient', - 'ContentServiceAsyncClient', - 'DataScanServiceAsyncClient', - 'DataTaxonomyServiceAsyncClient', - 'DataplexServiceAsyncClient', - 'MetadataServiceAsyncClient', -'Action', -'Aspect', -'AspectSource', -'AspectType', -'Asset', -'AssetStatus', -'BusinessGlossaryEvent', -'BusinessGlossaryServiceClient', -'CancelJobRequest', -'CancelMetadataJobRequest', -'CatalogServiceClient', -'CmekServiceClient', -'Content', -'ContentServiceClient', -'CreateAspectTypeRequest', -'CreateAssetRequest', -'CreateContentRequest', -'CreateDataAttributeBindingRequest', -'CreateDataAttributeRequest', -'CreateDataScanRequest', -'CreateDataTaxonomyRequest', -'CreateEncryptionConfigRequest', -'CreateEntityRequest', -'CreateEntryGroupRequest', 
-'CreateEntryLinkRequest', -'CreateEntryRequest', -'CreateEntryTypeRequest', -'CreateEnvironmentRequest', -'CreateGlossaryCategoryRequest', -'CreateGlossaryRequest', -'CreateGlossaryTermRequest', -'CreateLakeRequest', -'CreateMetadataJobRequest', -'CreatePartitionRequest', -'CreateTaskRequest', -'CreateZoneRequest', -'DataAccessSpec', -'DataAttribute', -'DataAttributeBinding', -'DataDiscoveryResult', -'DataDiscoverySpec', -'DataProfileResult', -'DataProfileSpec', -'DataQualityColumnResult', -'DataQualityDimension', -'DataQualityDimensionResult', -'DataQualityResult', -'DataQualityRule', -'DataQualityRuleResult', -'DataQualityScanRuleResult', -'DataQualitySpec', -'DataScan', -'DataScanCatalogPublishingStatus', -'DataScanEvent', -'DataScanJob', -'DataScanServiceClient', -'DataScanType', -'DataSource', -'DataTaxonomy', -'DataTaxonomyServiceClient', -'DataplexServiceClient', -'DeleteAspectTypeRequest', -'DeleteAssetRequest', -'DeleteContentRequest', -'DeleteDataAttributeBindingRequest', -'DeleteDataAttributeRequest', -'DeleteDataScanRequest', -'DeleteDataTaxonomyRequest', -'DeleteEncryptionConfigRequest', -'DeleteEntityRequest', -'DeleteEntryGroupRequest', -'DeleteEntryLinkRequest', -'DeleteEntryRequest', -'DeleteEntryTypeRequest', -'DeleteEnvironmentRequest', -'DeleteGlossaryCategoryRequest', -'DeleteGlossaryRequest', -'DeleteGlossaryTermRequest', -'DeleteLakeRequest', -'DeletePartitionRequest', -'DeleteTaskRequest', -'DeleteZoneRequest', -'DiscoveryEvent', -'EncryptionConfig', -'Entity', -'Entry', -'EntryGroup', -'EntryLink', -'EntryLinkEvent', -'EntrySource', -'EntryType', -'EntryView', -'Environment', -'GenerateDataQualityRulesRequest', -'GenerateDataQualityRulesResponse', -'GetAspectTypeRequest', -'GetAssetRequest', -'GetContentRequest', -'GetDataAttributeBindingRequest', -'GetDataAttributeRequest', -'GetDataScanJobRequest', -'GetDataScanRequest', -'GetDataTaxonomyRequest', -'GetEncryptionConfigRequest', -'GetEntityRequest', -'GetEntryGroupRequest', -'GetEntryLinkRequest', -'GetEntryRequest', -'GetEntryTypeRequest', -'GetEnvironmentRequest', -'GetGlossaryCategoryRequest', -'GetGlossaryRequest', -'GetGlossaryTermRequest', -'GetJobRequest', -'GetLakeRequest', -'GetMetadataJobRequest', -'GetPartitionRequest', -'GetTaskRequest', -'GetZoneRequest', -'Glossary', -'GlossaryCategory', -'GlossaryTerm', -'GovernanceEvent', -'ImportItem', -'Job', -'JobEvent', -'Lake', -'ListActionsResponse', -'ListAspectTypesRequest', -'ListAspectTypesResponse', -'ListAssetActionsRequest', -'ListAssetsRequest', -'ListAssetsResponse', -'ListContentRequest', -'ListContentResponse', -'ListDataAttributeBindingsRequest', -'ListDataAttributeBindingsResponse', -'ListDataAttributesRequest', -'ListDataAttributesResponse', -'ListDataScanJobsRequest', -'ListDataScanJobsResponse', -'ListDataScansRequest', -'ListDataScansResponse', -'ListDataTaxonomiesRequest', -'ListDataTaxonomiesResponse', -'ListEncryptionConfigsRequest', -'ListEncryptionConfigsResponse', -'ListEntitiesRequest', -'ListEntitiesResponse', -'ListEntriesRequest', -'ListEntriesResponse', -'ListEntryGroupsRequest', -'ListEntryGroupsResponse', -'ListEntryTypesRequest', -'ListEntryTypesResponse', -'ListEnvironmentsRequest', -'ListEnvironmentsResponse', -'ListGlossariesRequest', -'ListGlossariesResponse', -'ListGlossaryCategoriesRequest', -'ListGlossaryCategoriesResponse', -'ListGlossaryTermsRequest', -'ListGlossaryTermsResponse', -'ListJobsRequest', -'ListJobsResponse', -'ListLakeActionsRequest', -'ListLakesRequest', -'ListLakesResponse', -'ListMetadataJobsRequest', 
-'ListMetadataJobsResponse', -'ListPartitionsRequest', -'ListPartitionsResponse', -'ListSessionsRequest', -'ListSessionsResponse', -'ListTasksRequest', -'ListTasksResponse', -'ListZoneActionsRequest', -'ListZonesRequest', -'ListZonesResponse', -'LookupEntryRequest', -'MetadataJob', -'MetadataServiceClient', -'OperationMetadata', -'Partition', -'ResourceAccessSpec', -'RunDataScanRequest', -'RunDataScanResponse', -'RunTaskRequest', -'RunTaskResponse', -'ScannedData', -'Schema', -'SearchEntriesRequest', -'SearchEntriesResponse', -'SearchEntriesResult', -'Session', -'SessionEvent', -'State', -'StorageAccess', -'StorageFormat', -'StorageSystem', -'Task', -'TransferStatus', -'Trigger', -'UpdateAspectTypeRequest', -'UpdateAssetRequest', -'UpdateContentRequest', -'UpdateDataAttributeBindingRequest', -'UpdateDataAttributeRequest', -'UpdateDataScanRequest', -'UpdateDataTaxonomyRequest', -'UpdateEncryptionConfigRequest', -'UpdateEntityRequest', -'UpdateEntryGroupRequest', -'UpdateEntryRequest', -'UpdateEntryTypeRequest', -'UpdateEnvironmentRequest', -'UpdateGlossaryCategoryRequest', -'UpdateGlossaryRequest', -'UpdateGlossaryTermRequest', -'UpdateLakeRequest', -'UpdateTaskRequest', -'UpdateZoneRequest', -'Zone', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json deleted file mode 100644 index dd1090ea5c40..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_metadata.json +++ /dev/null @@ -1,2006 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dataplex_v1", - "protoPackage": "google.cloud.dataplex.v1", - "schema": "1.0", - "services": { - "BusinessGlossaryService": { - "clients": { - "grpc": { - "libraryClient": "BusinessGlossaryServiceClient", - "rpcs": { - "CreateGlossary": { - "methods": [ - "create_glossary" - ] - }, - "CreateGlossaryCategory": { - "methods": [ - "create_glossary_category" - ] - }, - "CreateGlossaryTerm": { - "methods": [ - "create_glossary_term" - ] - }, - "DeleteGlossary": { - "methods": [ - "delete_glossary" - ] - }, - "DeleteGlossaryCategory": { - "methods": [ - "delete_glossary_category" - ] - }, - "DeleteGlossaryTerm": { - "methods": [ - "delete_glossary_term" - ] - }, - "GetGlossary": { - "methods": [ - "get_glossary" - ] - }, - "GetGlossaryCategory": { - "methods": [ - "get_glossary_category" - ] - }, - "GetGlossaryTerm": { - "methods": [ - "get_glossary_term" - ] - }, - "ListGlossaries": { - "methods": [ - "list_glossaries" - ] - }, - "ListGlossaryCategories": { - "methods": [ - "list_glossary_categories" - ] - }, - "ListGlossaryTerms": { - "methods": [ - "list_glossary_terms" - ] - }, - "UpdateGlossary": { - "methods": [ - "update_glossary" - ] - }, - "UpdateGlossaryCategory": { - "methods": [ - "update_glossary_category" - ] - }, - "UpdateGlossaryTerm": { - "methods": [ - "update_glossary_term" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BusinessGlossaryServiceAsyncClient", - "rpcs": { - "CreateGlossary": { - "methods": [ - "create_glossary" - ] - }, - "CreateGlossaryCategory": { - "methods": [ - "create_glossary_category" - ] - }, - "CreateGlossaryTerm": { - "methods": [ - "create_glossary_term" - ] - }, - "DeleteGlossary": { - "methods": [ - "delete_glossary" - ] - }, - "DeleteGlossaryCategory": { - "methods": [ - "delete_glossary_category" - ] - }, - 
"DeleteGlossaryTerm": { - "methods": [ - "delete_glossary_term" - ] - }, - "GetGlossary": { - "methods": [ - "get_glossary" - ] - }, - "GetGlossaryCategory": { - "methods": [ - "get_glossary_category" - ] - }, - "GetGlossaryTerm": { - "methods": [ - "get_glossary_term" - ] - }, - "ListGlossaries": { - "methods": [ - "list_glossaries" - ] - }, - "ListGlossaryCategories": { - "methods": [ - "list_glossary_categories" - ] - }, - "ListGlossaryTerms": { - "methods": [ - "list_glossary_terms" - ] - }, - "UpdateGlossary": { - "methods": [ - "update_glossary" - ] - }, - "UpdateGlossaryCategory": { - "methods": [ - "update_glossary_category" - ] - }, - "UpdateGlossaryTerm": { - "methods": [ - "update_glossary_term" - ] - } - } - }, - "rest": { - "libraryClient": "BusinessGlossaryServiceClient", - "rpcs": { - "CreateGlossary": { - "methods": [ - "create_glossary" - ] - }, - "CreateGlossaryCategory": { - "methods": [ - "create_glossary_category" - ] - }, - "CreateGlossaryTerm": { - "methods": [ - "create_glossary_term" - ] - }, - "DeleteGlossary": { - "methods": [ - "delete_glossary" - ] - }, - "DeleteGlossaryCategory": { - "methods": [ - "delete_glossary_category" - ] - }, - "DeleteGlossaryTerm": { - "methods": [ - "delete_glossary_term" - ] - }, - "GetGlossary": { - "methods": [ - "get_glossary" - ] - }, - "GetGlossaryCategory": { - "methods": [ - "get_glossary_category" - ] - }, - "GetGlossaryTerm": { - "methods": [ - "get_glossary_term" - ] - }, - "ListGlossaries": { - "methods": [ - "list_glossaries" - ] - }, - "ListGlossaryCategories": { - "methods": [ - "list_glossary_categories" - ] - }, - "ListGlossaryTerms": { - "methods": [ - "list_glossary_terms" - ] - }, - "UpdateGlossary": { - "methods": [ - "update_glossary" - ] - }, - "UpdateGlossaryCategory": { - "methods": [ - "update_glossary_category" - ] - }, - "UpdateGlossaryTerm": { - "methods": [ - "update_glossary_term" - ] - } - } - } - } - }, - "CatalogService": { - "clients": { - "grpc": { - "libraryClient": "CatalogServiceClient", - "rpcs": { - "CancelMetadataJob": { - "methods": [ - "cancel_metadata_job" - ] - }, - "CreateAspectType": { - "methods": [ - "create_aspect_type" - ] - }, - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateEntryLink": { - "methods": [ - "create_entry_link" - ] - }, - "CreateEntryType": { - "methods": [ - "create_entry_type" - ] - }, - "CreateMetadataJob": { - "methods": [ - "create_metadata_job" - ] - }, - "DeleteAspectType": { - "methods": [ - "delete_aspect_type" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteEntryLink": { - "methods": [ - "delete_entry_link" - ] - }, - "DeleteEntryType": { - "methods": [ - "delete_entry_type" - ] - }, - "GetAspectType": { - "methods": [ - "get_aspect_type" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetEntryLink": { - "methods": [ - "get_entry_link" - ] - }, - "GetEntryType": { - "methods": [ - "get_entry_type" - ] - }, - "GetMetadataJob": { - "methods": [ - "get_metadata_job" - ] - }, - "ListAspectTypes": { - "methods": [ - "list_aspect_types" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListEntryTypes": { - "methods": [ - "list_entry_types" - ] - }, - "ListMetadataJobs": { - "methods": [ - 
"list_metadata_jobs" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "SearchEntries": { - "methods": [ - "search_entries" - ] - }, - "UpdateAspectType": { - "methods": [ - "update_aspect_type" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateEntryType": { - "methods": [ - "update_entry_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CatalogServiceAsyncClient", - "rpcs": { - "CancelMetadataJob": { - "methods": [ - "cancel_metadata_job" - ] - }, - "CreateAspectType": { - "methods": [ - "create_aspect_type" - ] - }, - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateEntryLink": { - "methods": [ - "create_entry_link" - ] - }, - "CreateEntryType": { - "methods": [ - "create_entry_type" - ] - }, - "CreateMetadataJob": { - "methods": [ - "create_metadata_job" - ] - }, - "DeleteAspectType": { - "methods": [ - "delete_aspect_type" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteEntryLink": { - "methods": [ - "delete_entry_link" - ] - }, - "DeleteEntryType": { - "methods": [ - "delete_entry_type" - ] - }, - "GetAspectType": { - "methods": [ - "get_aspect_type" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetEntryLink": { - "methods": [ - "get_entry_link" - ] - }, - "GetEntryType": { - "methods": [ - "get_entry_type" - ] - }, - "GetMetadataJob": { - "methods": [ - "get_metadata_job" - ] - }, - "ListAspectTypes": { - "methods": [ - "list_aspect_types" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListEntryTypes": { - "methods": [ - "list_entry_types" - ] - }, - "ListMetadataJobs": { - "methods": [ - "list_metadata_jobs" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "SearchEntries": { - "methods": [ - "search_entries" - ] - }, - "UpdateAspectType": { - "methods": [ - "update_aspect_type" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateEntryType": { - "methods": [ - "update_entry_type" - ] - } - } - }, - "rest": { - "libraryClient": "CatalogServiceClient", - "rpcs": { - "CancelMetadataJob": { - "methods": [ - "cancel_metadata_job" - ] - }, - "CreateAspectType": { - "methods": [ - "create_aspect_type" - ] - }, - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateEntryLink": { - "methods": [ - "create_entry_link" - ] - }, - "CreateEntryType": { - "methods": [ - "create_entry_type" - ] - }, - "CreateMetadataJob": { - "methods": [ - "create_metadata_job" - ] - }, - "DeleteAspectType": { - "methods": [ - "delete_aspect_type" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteEntryLink": { - "methods": [ - "delete_entry_link" - ] - }, - "DeleteEntryType": { - "methods": [ - "delete_entry_type" - ] - }, - "GetAspectType": { - "methods": [ - "get_aspect_type" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetEntryLink": { - 
"methods": [ - "get_entry_link" - ] - }, - "GetEntryType": { - "methods": [ - "get_entry_type" - ] - }, - "GetMetadataJob": { - "methods": [ - "get_metadata_job" - ] - }, - "ListAspectTypes": { - "methods": [ - "list_aspect_types" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListEntryTypes": { - "methods": [ - "list_entry_types" - ] - }, - "ListMetadataJobs": { - "methods": [ - "list_metadata_jobs" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "SearchEntries": { - "methods": [ - "search_entries" - ] - }, - "UpdateAspectType": { - "methods": [ - "update_aspect_type" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateEntryType": { - "methods": [ - "update_entry_type" - ] - } - } - } - } - }, - "CmekService": { - "clients": { - "grpc": { - "libraryClient": "CmekServiceClient", - "rpcs": { - "CreateEncryptionConfig": { - "methods": [ - "create_encryption_config" - ] - }, - "DeleteEncryptionConfig": { - "methods": [ - "delete_encryption_config" - ] - }, - "GetEncryptionConfig": { - "methods": [ - "get_encryption_config" - ] - }, - "ListEncryptionConfigs": { - "methods": [ - "list_encryption_configs" - ] - }, - "UpdateEncryptionConfig": { - "methods": [ - "update_encryption_config" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CmekServiceAsyncClient", - "rpcs": { - "CreateEncryptionConfig": { - "methods": [ - "create_encryption_config" - ] - }, - "DeleteEncryptionConfig": { - "methods": [ - "delete_encryption_config" - ] - }, - "GetEncryptionConfig": { - "methods": [ - "get_encryption_config" - ] - }, - "ListEncryptionConfigs": { - "methods": [ - "list_encryption_configs" - ] - }, - "UpdateEncryptionConfig": { - "methods": [ - "update_encryption_config" - ] - } - } - }, - "rest": { - "libraryClient": "CmekServiceClient", - "rpcs": { - "CreateEncryptionConfig": { - "methods": [ - "create_encryption_config" - ] - }, - "DeleteEncryptionConfig": { - "methods": [ - "delete_encryption_config" - ] - }, - "GetEncryptionConfig": { - "methods": [ - "get_encryption_config" - ] - }, - "ListEncryptionConfigs": { - "methods": [ - "list_encryption_configs" - ] - }, - "UpdateEncryptionConfig": { - "methods": [ - "update_encryption_config" - ] - } - } - } - } - }, - "ContentService": { - "clients": { - "grpc": { - "libraryClient": "ContentServiceClient", - "rpcs": { - "CreateContent": { - "methods": [ - "create_content" - ] - }, - "DeleteContent": { - "methods": [ - "delete_content" - ] - }, - "GetContent": { - "methods": [ - "get_content" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListContent": { - "methods": [ - "list_content" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateContent": { - "methods": [ - "update_content" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ContentServiceAsyncClient", - "rpcs": { - "CreateContent": { - "methods": [ - "create_content" - ] - }, - "DeleteContent": { - "methods": [ - "delete_content" - ] - }, - "GetContent": { - "methods": [ - "get_content" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListContent": { - "methods": [ - "list_content" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - 
}, - "UpdateContent": { - "methods": [ - "update_content" - ] - } - } - }, - "rest": { - "libraryClient": "ContentServiceClient", - "rpcs": { - "CreateContent": { - "methods": [ - "create_content" - ] - }, - "DeleteContent": { - "methods": [ - "delete_content" - ] - }, - "GetContent": { - "methods": [ - "get_content" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListContent": { - "methods": [ - "list_content" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateContent": { - "methods": [ - "update_content" - ] - } - } - } - } - }, - "DataScanService": { - "clients": { - "grpc": { - "libraryClient": "DataScanServiceClient", - "rpcs": { - "CreateDataScan": { - "methods": [ - "create_data_scan" - ] - }, - "DeleteDataScan": { - "methods": [ - "delete_data_scan" - ] - }, - "GenerateDataQualityRules": { - "methods": [ - "generate_data_quality_rules" - ] - }, - "GetDataScan": { - "methods": [ - "get_data_scan" - ] - }, - "GetDataScanJob": { - "methods": [ - "get_data_scan_job" - ] - }, - "ListDataScanJobs": { - "methods": [ - "list_data_scan_jobs" - ] - }, - "ListDataScans": { - "methods": [ - "list_data_scans" - ] - }, - "RunDataScan": { - "methods": [ - "run_data_scan" - ] - }, - "UpdateDataScan": { - "methods": [ - "update_data_scan" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataScanServiceAsyncClient", - "rpcs": { - "CreateDataScan": { - "methods": [ - "create_data_scan" - ] - }, - "DeleteDataScan": { - "methods": [ - "delete_data_scan" - ] - }, - "GenerateDataQualityRules": { - "methods": [ - "generate_data_quality_rules" - ] - }, - "GetDataScan": { - "methods": [ - "get_data_scan" - ] - }, - "GetDataScanJob": { - "methods": [ - "get_data_scan_job" - ] - }, - "ListDataScanJobs": { - "methods": [ - "list_data_scan_jobs" - ] - }, - "ListDataScans": { - "methods": [ - "list_data_scans" - ] - }, - "RunDataScan": { - "methods": [ - "run_data_scan" - ] - }, - "UpdateDataScan": { - "methods": [ - "update_data_scan" - ] - } - } - }, - "rest": { - "libraryClient": "DataScanServiceClient", - "rpcs": { - "CreateDataScan": { - "methods": [ - "create_data_scan" - ] - }, - "DeleteDataScan": { - "methods": [ - "delete_data_scan" - ] - }, - "GenerateDataQualityRules": { - "methods": [ - "generate_data_quality_rules" - ] - }, - "GetDataScan": { - "methods": [ - "get_data_scan" - ] - }, - "GetDataScanJob": { - "methods": [ - "get_data_scan_job" - ] - }, - "ListDataScanJobs": { - "methods": [ - "list_data_scan_jobs" - ] - }, - "ListDataScans": { - "methods": [ - "list_data_scans" - ] - }, - "RunDataScan": { - "methods": [ - "run_data_scan" - ] - }, - "UpdateDataScan": { - "methods": [ - "update_data_scan" - ] - } - } - } - } - }, - "DataTaxonomyService": { - "clients": { - "grpc": { - "libraryClient": "DataTaxonomyServiceClient", - "rpcs": { - "CreateDataAttribute": { - "methods": [ - "create_data_attribute" - ] - }, - "CreateDataAttributeBinding": { - "methods": [ - "create_data_attribute_binding" - ] - }, - "CreateDataTaxonomy": { - "methods": [ - "create_data_taxonomy" - ] - }, - "DeleteDataAttribute": { - "methods": [ - "delete_data_attribute" - ] - }, - "DeleteDataAttributeBinding": { - "methods": [ - "delete_data_attribute_binding" - ] - }, - "DeleteDataTaxonomy": { - "methods": [ - "delete_data_taxonomy" - ] - }, - "GetDataAttribute": { - "methods": [ - "get_data_attribute" - ] - }, - "GetDataAttributeBinding": { - "methods": [ - "get_data_attribute_binding" - 
] - }, - "GetDataTaxonomy": { - "methods": [ - "get_data_taxonomy" - ] - }, - "ListDataAttributeBindings": { - "methods": [ - "list_data_attribute_bindings" - ] - }, - "ListDataAttributes": { - "methods": [ - "list_data_attributes" - ] - }, - "ListDataTaxonomies": { - "methods": [ - "list_data_taxonomies" - ] - }, - "UpdateDataAttribute": { - "methods": [ - "update_data_attribute" - ] - }, - "UpdateDataAttributeBinding": { - "methods": [ - "update_data_attribute_binding" - ] - }, - "UpdateDataTaxonomy": { - "methods": [ - "update_data_taxonomy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataTaxonomyServiceAsyncClient", - "rpcs": { - "CreateDataAttribute": { - "methods": [ - "create_data_attribute" - ] - }, - "CreateDataAttributeBinding": { - "methods": [ - "create_data_attribute_binding" - ] - }, - "CreateDataTaxonomy": { - "methods": [ - "create_data_taxonomy" - ] - }, - "DeleteDataAttribute": { - "methods": [ - "delete_data_attribute" - ] - }, - "DeleteDataAttributeBinding": { - "methods": [ - "delete_data_attribute_binding" - ] - }, - "DeleteDataTaxonomy": { - "methods": [ - "delete_data_taxonomy" - ] - }, - "GetDataAttribute": { - "methods": [ - "get_data_attribute" - ] - }, - "GetDataAttributeBinding": { - "methods": [ - "get_data_attribute_binding" - ] - }, - "GetDataTaxonomy": { - "methods": [ - "get_data_taxonomy" - ] - }, - "ListDataAttributeBindings": { - "methods": [ - "list_data_attribute_bindings" - ] - }, - "ListDataAttributes": { - "methods": [ - "list_data_attributes" - ] - }, - "ListDataTaxonomies": { - "methods": [ - "list_data_taxonomies" - ] - }, - "UpdateDataAttribute": { - "methods": [ - "update_data_attribute" - ] - }, - "UpdateDataAttributeBinding": { - "methods": [ - "update_data_attribute_binding" - ] - }, - "UpdateDataTaxonomy": { - "methods": [ - "update_data_taxonomy" - ] - } - } - }, - "rest": { - "libraryClient": "DataTaxonomyServiceClient", - "rpcs": { - "CreateDataAttribute": { - "methods": [ - "create_data_attribute" - ] - }, - "CreateDataAttributeBinding": { - "methods": [ - "create_data_attribute_binding" - ] - }, - "CreateDataTaxonomy": { - "methods": [ - "create_data_taxonomy" - ] - }, - "DeleteDataAttribute": { - "methods": [ - "delete_data_attribute" - ] - }, - "DeleteDataAttributeBinding": { - "methods": [ - "delete_data_attribute_binding" - ] - }, - "DeleteDataTaxonomy": { - "methods": [ - "delete_data_taxonomy" - ] - }, - "GetDataAttribute": { - "methods": [ - "get_data_attribute" - ] - }, - "GetDataAttributeBinding": { - "methods": [ - "get_data_attribute_binding" - ] - }, - "GetDataTaxonomy": { - "methods": [ - "get_data_taxonomy" - ] - }, - "ListDataAttributeBindings": { - "methods": [ - "list_data_attribute_bindings" - ] - }, - "ListDataAttributes": { - "methods": [ - "list_data_attributes" - ] - }, - "ListDataTaxonomies": { - "methods": [ - "list_data_taxonomies" - ] - }, - "UpdateDataAttribute": { - "methods": [ - "update_data_attribute" - ] - }, - "UpdateDataAttributeBinding": { - "methods": [ - "update_data_attribute_binding" - ] - }, - "UpdateDataTaxonomy": { - "methods": [ - "update_data_taxonomy" - ] - } - } - } - } - }, - "DataplexService": { - "clients": { - "grpc": { - "libraryClient": "DataplexServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateAsset": { - "methods": [ - "create_asset" - ] - }, - "CreateEnvironment": { - "methods": [ - "create_environment" - ] - }, - "CreateLake": { - "methods": [ - "create_lake" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - 
"CreateZone": { - "methods": [ - "create_zone" - ] - }, - "DeleteAsset": { - "methods": [ - "delete_asset" - ] - }, - "DeleteEnvironment": { - "methods": [ - "delete_environment" - ] - }, - "DeleteLake": { - "methods": [ - "delete_lake" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "DeleteZone": { - "methods": [ - "delete_zone" - ] - }, - "GetAsset": { - "methods": [ - "get_asset" - ] - }, - "GetEnvironment": { - "methods": [ - "get_environment" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetLake": { - "methods": [ - "get_lake" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "GetZone": { - "methods": [ - "get_zone" - ] - }, - "ListAssetActions": { - "methods": [ - "list_asset_actions" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListEnvironments": { - "methods": [ - "list_environments" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListLakeActions": { - "methods": [ - "list_lake_actions" - ] - }, - "ListLakes": { - "methods": [ - "list_lakes" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "ListZoneActions": { - "methods": [ - "list_zone_actions" - ] - }, - "ListZones": { - "methods": [ - "list_zones" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "UpdateAsset": { - "methods": [ - "update_asset" - ] - }, - "UpdateEnvironment": { - "methods": [ - "update_environment" - ] - }, - "UpdateLake": { - "methods": [ - "update_lake" - ] - }, - "UpdateTask": { - "methods": [ - "update_task" - ] - }, - "UpdateZone": { - "methods": [ - "update_zone" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataplexServiceAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateAsset": { - "methods": [ - "create_asset" - ] - }, - "CreateEnvironment": { - "methods": [ - "create_environment" - ] - }, - "CreateLake": { - "methods": [ - "create_lake" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "CreateZone": { - "methods": [ - "create_zone" - ] - }, - "DeleteAsset": { - "methods": [ - "delete_asset" - ] - }, - "DeleteEnvironment": { - "methods": [ - "delete_environment" - ] - }, - "DeleteLake": { - "methods": [ - "delete_lake" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "DeleteZone": { - "methods": [ - "delete_zone" - ] - }, - "GetAsset": { - "methods": [ - "get_asset" - ] - }, - "GetEnvironment": { - "methods": [ - "get_environment" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetLake": { - "methods": [ - "get_lake" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "GetZone": { - "methods": [ - "get_zone" - ] - }, - "ListAssetActions": { - "methods": [ - "list_asset_actions" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListEnvironments": { - "methods": [ - "list_environments" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListLakeActions": { - "methods": [ - "list_lake_actions" - ] - }, - "ListLakes": { - "methods": [ - "list_lakes" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "ListZoneActions": { - "methods": [ - "list_zone_actions" - ] - }, - "ListZones": { - "methods": [ - "list_zones" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "UpdateAsset": { - "methods": [ - "update_asset" - ] - }, - "UpdateEnvironment": { - "methods": [ - "update_environment" - ] - }, - 
"UpdateLake": { - "methods": [ - "update_lake" - ] - }, - "UpdateTask": { - "methods": [ - "update_task" - ] - }, - "UpdateZone": { - "methods": [ - "update_zone" - ] - } - } - }, - "rest": { - "libraryClient": "DataplexServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateAsset": { - "methods": [ - "create_asset" - ] - }, - "CreateEnvironment": { - "methods": [ - "create_environment" - ] - }, - "CreateLake": { - "methods": [ - "create_lake" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "CreateZone": { - "methods": [ - "create_zone" - ] - }, - "DeleteAsset": { - "methods": [ - "delete_asset" - ] - }, - "DeleteEnvironment": { - "methods": [ - "delete_environment" - ] - }, - "DeleteLake": { - "methods": [ - "delete_lake" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "DeleteZone": { - "methods": [ - "delete_zone" - ] - }, - "GetAsset": { - "methods": [ - "get_asset" - ] - }, - "GetEnvironment": { - "methods": [ - "get_environment" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetLake": { - "methods": [ - "get_lake" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "GetZone": { - "methods": [ - "get_zone" - ] - }, - "ListAssetActions": { - "methods": [ - "list_asset_actions" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListEnvironments": { - "methods": [ - "list_environments" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListLakeActions": { - "methods": [ - "list_lake_actions" - ] - }, - "ListLakes": { - "methods": [ - "list_lakes" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "ListZoneActions": { - "methods": [ - "list_zone_actions" - ] - }, - "ListZones": { - "methods": [ - "list_zones" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "UpdateAsset": { - "methods": [ - "update_asset" - ] - }, - "UpdateEnvironment": { - "methods": [ - "update_environment" - ] - }, - "UpdateLake": { - "methods": [ - "update_lake" - ] - }, - "UpdateTask": { - "methods": [ - "update_task" - ] - }, - "UpdateZone": { - "methods": [ - "update_zone" - ] - } - } - } - } - }, - "MetadataService": { - "clients": { - "grpc": { - "libraryClient": "MetadataServiceClient", - "rpcs": { - "CreateEntity": { - "methods": [ - "create_entity" - ] - }, - "CreatePartition": { - "methods": [ - "create_partition" - ] - }, - "DeleteEntity": { - "methods": [ - "delete_entity" - ] - }, - "DeletePartition": { - "methods": [ - "delete_partition" - ] - }, - "GetEntity": { - "methods": [ - "get_entity" - ] - }, - "GetPartition": { - "methods": [ - "get_partition" - ] - }, - "ListEntities": { - "methods": [ - "list_entities" - ] - }, - "ListPartitions": { - "methods": [ - "list_partitions" - ] - }, - "UpdateEntity": { - "methods": [ - "update_entity" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetadataServiceAsyncClient", - "rpcs": { - "CreateEntity": { - "methods": [ - "create_entity" - ] - }, - "CreatePartition": { - "methods": [ - "create_partition" - ] - }, - "DeleteEntity": { - "methods": [ - "delete_entity" - ] - }, - "DeletePartition": { - "methods": [ - "delete_partition" - ] - }, - "GetEntity": { - "methods": [ - "get_entity" - ] - }, - "GetPartition": { - "methods": [ - "get_partition" - ] - }, - "ListEntities": { - "methods": [ - "list_entities" - ] - }, - "ListPartitions": { - "methods": [ - "list_partitions" - ] - }, - "UpdateEntity": { - "methods": [ - 
"update_entity" - ] - } - } - }, - "rest": { - "libraryClient": "MetadataServiceClient", - "rpcs": { - "CreateEntity": { - "methods": [ - "create_entity" - ] - }, - "CreatePartition": { - "methods": [ - "create_partition" - ] - }, - "DeleteEntity": { - "methods": [ - "delete_entity" - ] - }, - "DeletePartition": { - "methods": [ - "delete_partition" - ] - }, - "GetEntity": { - "methods": [ - "get_entity" - ] - }, - "GetPartition": { - "methods": [ - "get_partition" - ] - }, - "ListEntities": { - "methods": [ - "list_entities" - ] - }, - "ListPartitions": { - "methods": [ - "list_partitions" - ] - }, - "UpdateEntity": { - "methods": [ - "update_entity" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b02..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed deleted file mode 100644 index c932c263028e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataplex package uses inline types. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py deleted file mode 100644 index cbf94b283c70..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py deleted file mode 100644 index 9143de05010c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CatalogServiceClient -from .async_client import CatalogServiceAsyncClient - -__all__ = ( - 'CatalogServiceClient', - 'CatalogServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py deleted file mode 100644 index dbd874963be6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ /dev/null @@ -1,4109 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport -from .client import CatalogServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class CatalogServiceAsyncClient: - """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. - They collectively let data administrators organize, manage, - secure, and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - """ - - _client: CatalogServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = CatalogServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = CatalogServiceClient._DEFAULT_UNIVERSE - - aspect_type_path = staticmethod(CatalogServiceClient.aspect_type_path) - parse_aspect_type_path = staticmethod(CatalogServiceClient.parse_aspect_type_path) - entry_path = staticmethod(CatalogServiceClient.entry_path) - parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) - entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) - parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) - entry_link_path = staticmethod(CatalogServiceClient.entry_link_path) - parse_entry_link_path = staticmethod(CatalogServiceClient.parse_entry_link_path) - entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) - parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) - glossary_path = staticmethod(CatalogServiceClient.glossary_path) - parse_glossary_path = staticmethod(CatalogServiceClient.parse_glossary_path) - metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) - parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) - common_billing_account_path = staticmethod(CatalogServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CatalogServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CatalogServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(CatalogServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(CatalogServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(CatalogServiceClient.parse_common_organization_path) - common_project_path = staticmethod(CatalogServiceClient.common_project_path) - parse_common_project_path = staticmethod(CatalogServiceClient.parse_common_project_path) - common_location_path = staticmethod(CatalogServiceClient.common_location_path) - parse_common_location_path = staticmethod(CatalogServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceAsyncClient: The constructed client. - """ - return CatalogServiceClient.from_service_account_info.__func__(CatalogServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceAsyncClient: The constructed client. 
- """ - return CatalogServiceClient.from_service_account_file.__func__(CatalogServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CatalogServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CatalogServiceTransport: - """Returns the transport used by the client instance. - - Returns: - CatalogServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = CatalogServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the catalog service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the CatalogServiceTransport constructor. 
- If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CatalogServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.CatalogServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "credentialsType": None, - } - ) - - async def create_entry_type(self, - request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_type: Optional[catalog.EntryType] = None, - entry_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]): - The request object. Create EntryType Request. - parent (:class:`str`): - Required. The resource name of the EntryType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type_id (:class:`str`): - Required. EntryType identifier. - This corresponds to the ``entry_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entry_type, entry_type_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryTypeRequest): - request = catalog.CreateEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_type is not None: - request.entry_type = entry_type - if entry_type_id is not None: - request.entry_type_id = entry_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_entry_type(self, - request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, - *, - entry_type: Optional[catalog.EntryType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): - The request object. Update EntryType Request. - entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry_type, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryTypeRequest): - request = catalog.UpdateEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_type is not None: - request.entry_type = entry_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_type.name", request.entry_type.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_entry_type(self, - request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): - The request object. Delete EntryType Request. - name (:class:`str`): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryTypeRequest): - request = catalog.DeleteEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_entry_types(self, - request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntryTypesAsyncPager: - r"""Lists EntryType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]]): - The request object. List EntryTypes request - parent (:class:`str`): - Required. The resource name of the EntryType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: - List EntryTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryTypesRequest): - request = catalog.ListEntryTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryTypesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_entry_type(self, - request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryType: - r"""Gets an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): - The request object. Get EntryType request. - name (:class:`str`): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryType: - Entry Type is a template for creating - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryTypeRequest): - request = catalog.GetEntryTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_aspect_type(self, - request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - aspect_type: Optional[catalog.AspectType] = None, - aspect_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): - The request object. Create AspectType Request. - parent (:class:`str`): - Required. The resource name of the AspectType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource. - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type_id (:class:`str`): - Required. AspectType identifier. - This corresponds to the ``aspect_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, aspect_type, aspect_type_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateAspectTypeRequest): - request = catalog.CreateAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if aspect_type is not None: - request.aspect_type = aspect_type - if aspect_type_id is not None: - request.aspect_type_id = aspect_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_aspect_type(self, - request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, - *, - aspect_type: Optional[catalog.AspectType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]]): - The request object. 
Update AspectType Request - aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [aspect_type, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateAspectTypeRequest): - request = catalog.UpdateAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if aspect_type is not None: - request.aspect_type = aspect_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("aspect_type.name", request.aspect_type.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_aspect_type(self, - request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an AspectType. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAspectTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): - The request object. Delete AspectType Request. - name (:class:`str`): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteAspectTypeRequest): - request = catalog.DeleteAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. 
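# --- Editor's note (illustrative, not part of the patch): to_grpc_metadata()
# --- renders the ("name", request.name) pair below into the standard routing
# --- header, roughly ("x-goog-request-params", "name=projects%2Fmy-project%2F..."),
# --- with the value percent-encoded, so the backend can route the request by
# --- the resource's location.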
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_aspect_types(self, - request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAspectTypesAsyncPager: - r"""Lists AspectType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): - The request object. List AspectTypes request. - parent (:class:`str`): - Required. The resource name of the AspectType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: - List AspectTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
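# --- Editor's note (illustrative, not part of the patch): the quick check
# --- described above and implemented just below is why mixing calling styles
# --- fails. For example, with a hypothetical parent, this raises ValueError:
# ---     await client.list_aspect_types(
# ---         request=dataplex_v1.ListAspectTypesRequest(parent="projects/p/locations/l"),
# ---         parent="projects/p/locations/l",  # flattened arg alongside `request`
# ---     )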
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListAspectTypesRequest): - request = catalog.ListAspectTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_aspect_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAspectTypesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_aspect_type(self, - request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.AspectType: - r"""Gets an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_aspect_type(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): - The request object. Get AspectType request. - name (:class:`str`): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.AspectType: - AspectType is a template for creating - Aspects, and represents the JSON-schema - for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetAspectTypeRequest): - request = catalog.GetAspectTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry_group(self, - request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group: Optional[catalog.EntryGroup] = None, - entry_group_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]): - The request object. Create EntryGroup Request. - parent (:class:`str`): - Required. 
The resource name of the entryGroup, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (:class:`str`): - Required. EntryGroup identifier. - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entry_group, entry_group_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryGroupRequest): - request = catalog.CreateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group is not None: - request.entry_group = entry_group - if entry_group_id is not None: - request.entry_group_id = entry_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
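# --- Editor's sketch (not part of the patch; identifiers hypothetical): driving
# --- this long-running create with flattened arguments and awaiting its result.
from google.cloud import dataplex_v1

async def sketch_create_entry_group_flattened():
    client = dataplex_v1.CatalogServiceAsyncClient()
    operation = await client.create_entry_group(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        entry_group=dataplex_v1.EntryGroup(description="example group"),
        entry_group_id="my-entry-group",  # hypothetical
    )
    entry_group = await operation.result()  # the EntryGroup once the LRO completes
    print(entry_group.name)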
- return response - - async def update_entry_group(self, - request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[catalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): - The request object. Update EntryGroup Request. - entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry_group, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryGroupRequest): - request = catalog.UpdateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_entry_group(self, - request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): - The request object. Delete EntryGroup Request. - name (:class:`str`): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. 
A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryGroupRequest): - request = catalog.DeleteEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_entry_groups(self, - request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntryGroupsAsyncPager: - r"""Lists EntryGroup resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]]): - The request object. List entryGroups request. - parent (:class:`str`): - Required. The resource name of the entryGroup location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: - List entry groups response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryGroupsRequest): - request = catalog.ListEntryGroupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryGroupsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_group(self, - request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]]): - The request object. Get EntryGroup request. - name (:class:`str`): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryGroup: - An Entry Group represents a logical - grouping of one or more Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryGroupRequest): - request = catalog.GetEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry(self, - request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry: Optional[catalog.Entry] = None, - entry_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Creates an Entry. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): - The request object. Create Entry request. - parent (:class:`str`): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (:class:`google.cloud.dataplex_v1.types.Entry`): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (:class:`str`): - Required. Entry identifier. It has to be unique within - an Entry Group. - - Entries corresponding to Google Cloud resources use an - Entry ID format based on `full resource - names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__. - The format is a full resource name of the resource - without the prefix double slashes in the API service - name part of the full resource name. This allows - retrieval of entries using their associated resource - name. - - For example, if the full resource name of a resource is - ``//library.googleapis.com/shelves/shelf1/books/book2``, - then the suggested entry_id is - ``library.googleapis.com/shelves/shelf1/books/book2``. - - It is also suggested to follow the same convention for - entries corresponding to resources from providers or - systems other than Google Cloud. - - The maximum size of the field is 4000 characters. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
- flattened_params = [parent, entry, entry_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryRequest): - request = catalog.CreateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry is not None: - request.entry = entry - if entry_id is not None: - request.entry_id = entry_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entry(self, - request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[catalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Updates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): - The request object. Update Entry request. - entry (:class:`google.cloud.dataplex_v1.types.Entry`): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Mask of fields to update. To update Aspects, - the update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryRequest): - request = catalog.UpdateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry(self, - request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Deletes an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): - The request object. Delete Entry request. - name (:class:`str`): - Required. 
The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryRequest): - request = catalog.DeleteEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_entries(self, - request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntriesAsyncPager: - r"""Lists Entries within an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): - The request object. List Entries request. - parent (:class:`str`): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: - List Entries response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntriesRequest): - request = catalog.ListEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntriesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_entry(self, - request: Optional[Union[catalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Gets an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]): - The request object. Get Entry request. - name (:class:`str`): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryRequest): - request = catalog.GetEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def lookup_entry(self, - request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Looks up an entry by name using the permission on the - source system. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]): - The request object. Lookup Entry request using - permissions in the source system. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.LookupEntryRequest): - request = catalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.lookup_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def search_entries(self, - request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None, - *, - name: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchEntriesAsyncPager: - r"""Searches for Entries matching the given query and - scope. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]]): - The request object. - name (:class:`str`): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/global``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Required. The query against which entries in scope - should be matched. The query syntax is defined in - `Search syntax for Dataplex Universal - Catalog <https://cloud.google.com/dataplex/docs/search-syntax>`__. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager: - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.SearchEntriesRequest): - request = catalog.SearchEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain.
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchEntriesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_metadata_job(self, - request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - metadata_job: Optional[catalog.MetadataJob] = None, - metadata_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a metadata job. For example, use a metadata - job to import metadata from a third-party system into - Dataplex Universal Catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "NONE" - metadata_job.import_spec.aspect_sync_mode = "NONE" - metadata_job.type_ = "EXPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): - The request object. Create metadata job request. - parent (:class:`str`): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): - Required. The metadata job resource. - This corresponds to the ``metadata_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job_id (:class:`str`): - Optional. The metadata job ID. If not provided, a unique - ID is generated with the prefix ``metadata-job-``. - - This corresponds to the ``metadata_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.MetadataJob` A - metadata job resource. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, metadata_job, metadata_job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateMetadataJobRequest): - request = catalog.CreateMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metadata_job is not None: - request.metadata_job = metadata_job - if metadata_job_id is not None: - request.metadata_job_id = metadata_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - catalog.MetadataJob, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_metadata_job(self, - request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.MetadataJob: - r"""Gets a metadata job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetMetadataJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_metadata_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): - The request object. Get metadata job request. - name (:class:`str`): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.MetadataJob: - A metadata job resource. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetMetadataJobRequest): - request = catalog.GetMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_metadata_jobs(self, - request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMetadataJobsAsyncPager: - r"""Lists metadata jobs. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): - The request object. List metadata jobs request. - parent (:class:`str`): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: - List metadata jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListMetadataJobsRequest): - request = catalog.ListMetadataJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_metadata_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListMetadataJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_metadata_job(self, - request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_metadata_job(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): - The request object. Cancel metadata job request. - name (:class:`str`): - Required. The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CancelMetadataJobRequest): - request = catalog.CancelMetadataJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_entry_link(self, - request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_link: Optional[catalog.EntryLink] = None, - entry_link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Creates an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry_link = dataplex_v1.EntryLink() - entry_link.entry_link_type = "entry_link_type_value" - entry_link.entry_references.name = "name_value" - entry_link.entry_references.type_ = "TARGET" - - request = dataplex_v1.CreateEntryLinkRequest( - parent="parent_value", - entry_link_id="entry_link_id_value", - entry_link=entry_link, - ) - - # Make the request - response = await client.create_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]]): - The request object. Request message for CreateEntryLink. - parent (:class:`str`): - Required. The resource name of the parent Entry Group: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_link (:class:`google.cloud.dataplex_v1.types.EntryLink`): - Required. Entry Link resource. - This corresponds to the ``entry_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_link_id (:class:`str`): - Required. Entry Link identifier - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the EntryGroup. - - This corresponds to the ``entry_link_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entry_link, entry_link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryLinkRequest): - request = catalog.CreateEntryLinkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_link is not None: - request.entry_link = entry_link - if entry_link_id is not None: - request.entry_link_id = entry_link_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry_link(self, - request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Deletes an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryLinkRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]]): - The request object. Request message for DeleteEntryLink. - name (:class:`str`): - Required. The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryLinkRequest): - request = catalog.DeleteEntryLinkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_link(self, - request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Gets an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryLinkRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]]): - The request object. Request message for GetEntryLink. - name (:class:`str`): - Required. 
The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryLinkRequest): - request = catalog.GetEntryLinkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "CatalogServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "CatalogServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py deleted file mode 100644 index d96514c69ff1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/client.py +++ /dev/null @@ -1,4507 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore 
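As a usage note for the async surface above: every mixin method takes the same retry/timeout/metadata triple, and the client supports ``async with`` via the ``__aenter__``/``__aexit__`` pair shown. A minimal sketch of exercising that convention; the operation name is a placeholder and the retry settings are illustrative assumptions, not recommended values:

.. code-block:: python

    import asyncio

    from google.api_core import retry_async
    from google.cloud import dataplex_v1

    async def cancel_stale_operation(operation_name: str) -> None:
        # __aenter__/__aexit__ close the transport when the block exits.
        async with dataplex_v1.CatalogServiceAsyncClient() as client:
            await client.cancel_operation(
                # A plain dict is coerced to CancelOperationRequest, as in the
                # isinstance check above.
                request={"name": operation_name},
                retry=retry_async.AsyncRetry(initial=0.2, maximum=5.0, timeout=30.0),
                timeout=60.0,
            )

    # Placeholder name; a real one would come from a prior long-running operation.
    asyncio.run(cancel_stale_operation(
        "projects/my-project/locations/us-central1/operations/op-123"))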
-from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CatalogServiceGrpcTransport -from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport -from .transports.rest import CatalogServiceRestTransport - - -class CatalogServiceClientMeta(type): - """Metaclass for the CatalogService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] - _transport_registry["grpc"] = CatalogServiceGrpcTransport - _transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport - _transport_registry["rest"] = CatalogServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CatalogServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CatalogServiceClient(metaclass=CatalogServiceClientMeta): - """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. - They collectively let data administrators organize, manage, - secure, and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CatalogServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CatalogServiceTransport: - """Returns the transport used by the client instance. - - Returns: - CatalogServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def aspect_type_path(project: str,location: str,aspect_type: str,) -> str: - """Returns a fully-qualified aspect_type string.""" - return "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, ) - - @staticmethod - def parse_aspect_type_path(path: str) -> Dict[str,str]: - """Parses a aspect_type path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/aspectTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: - """Returns a fully-qualified entry string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - - @staticmethod - def parse_entry_path(path: str) -> Dict[str,str]: - """Parses a entry path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_group_path(project: str,location: str,entry_group: str,) -> str: - """Returns a fully-qualified entry_group string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - - @staticmethod - def parse_entry_group_path(path: str) -> Dict[str,str]: - """Parses a entry_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_link_path(project: str,location: str,entry_group: str,entry_link: str,) -> str: - """Returns a fully-qualified entry_link string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format(project=project, location=location, entry_group=entry_group, entry_link=entry_link, ) - - @staticmethod - def parse_entry_link_path(path: str) -> Dict[str,str]: - """Parses a entry_link path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entryLinks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_type_path(project: str,location: str,entry_type: str,) -> str: - """Returns a fully-qualified entry_type string.""" - return 
"projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) - - @staticmethod - def parse_entry_type_path(path: str) -> Dict[str,str]: - """Parses a entry_type path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryTypes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def glossary_path(project: str,location: str,glossary: str,) -> str: - """Returns a fully-qualified glossary string.""" - return "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) - - @staticmethod - def parse_glossary_path(path: str) -> Dict[str,str]: - """Parses a glossary path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def metadata_job_path(project: str,location: str,metadataJob: str,) -> str: - """Returns a fully-qualified metadata_job string.""" - return "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) - - @staticmethod - def parse_metadata_job_path(path: str) -> Dict[str,str]: - """Parses a metadata_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/metadataJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = CatalogServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CatalogServiceTransport, Callable[..., CatalogServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the catalog service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,CatalogServiceTransport,Callable[..., CatalogServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the CatalogServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CatalogServiceClient._read_environment_variables() - self._client_cert_source = CatalogServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = CatalogServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, CatalogServiceTransport) - if transport_provided: - # transport is a CatalogServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(CatalogServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - CatalogServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[CatalogServiceTransport], Callable[..., CatalogServiceTransport]] = ( - CatalogServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., CatalogServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.CatalogServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "credentialsType": None, - } - ) - - def create_entry_type(self, - request: Optional[Union[catalog.CreateEntryTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_type: Optional[catalog.EntryType] = None, - entry_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): - The request object. 
Create EntryType Request. - parent (str): - Required. The resource name of the EntryType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_type_id (str): - Required. EntryType identifier. - This corresponds to the ``entry_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entry_type, entry_type_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryTypeRequest): - request = catalog.CreateEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_type is not None: - request.entry_type = entry_type - if entry_type_id is not None: - request.entry_type_id = entry_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
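For contrast with the request-object style in the generated sample above, a sketch of the flattened-argument convention; the two styles are mutually exclusive, as the ``ValueError`` above enforces. The project, region, and identifiers are placeholders:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    operation = client.create_entry_type(
        parent="projects/my-project/locations/us-central1",
        entry_type=dataplex_v1.EntryType(description="Tables ingested nightly"),
        entry_type_id="my-entry-type",
    )

    # The returned LRO future resolves to the created EntryType.
    entry_type = operation.result()
    print(entry_type.name)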
- return response - - def update_entry_type(self, - request: Optional[Union[catalog.UpdateEntryTypeRequest, dict]] = None, - *, - entry_type: Optional[catalog.EntryType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): - The request object. Update EntryType Request. - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. - This corresponds to the ``entry_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryType` Entry - Type is a template for creating Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry_type, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryTypeRequest): - request = catalog.UpdateEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if entry_type is not None: - request.entry_type = entry_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_type.name", request.entry_type.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_entry_type(self, - request: Optional[Union[catalog.DeleteEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): - The request object. Delete EntryType Request. - name (str): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryTypeRequest): - request = catalog.DeleteEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_entry_types(self, - request: Optional[Union[catalog.ListEntryTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntryTypesPager: - r"""Lists EntryType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntryTypesRequest, dict]): - The request object. List EntryTypes request - parent (str): - Required. The resource name of the EntryType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
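The pager returned by this method can also be consumed page by page when page-level fields such as ``next_page_token`` are needed, rather than item by item as in the generated sample. A brief sketch; the parent value is a placeholder:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    pager = client.list_entry_types(
        parent="projects/my-project/locations/us-central1")

    # Each page is a ListEntryTypesResponse; iteration fetches pages lazily.
    for page in pager.pages:
        print(len(page.entry_types), repr(page.next_page_token))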
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: - List EntryTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryTypesRequest): - request = catalog.ListEntryTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entry_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryTypesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_type(self, - request: Optional[Union[catalog.GetEntryTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryType: - r"""Gets an EntryType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): - The request object. Get EntryType request. - name (str): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryType: - Entry Type is a template for creating - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryTypeRequest): - request = catalog.GetEntryTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_aspect_type(self, - request: Optional[Union[catalog.CreateAspectTypeRequest, dict]] = None, - *, - parent: Optional[str] = None, - aspect_type: Optional[catalog.AspectType] = None, - aspect_type_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): - The request object. Create AspectType Request. - parent (str): - Required. The resource name of the AspectType, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource. - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - aspect_type_id (str): - Required. AspectType identifier. - This corresponds to the ``aspect_type_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, aspect_type, aspect_type_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateAspectTypeRequest): - request = catalog.CreateAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if aspect_type is not None: - request.aspect_type = aspect_type - if aspect_type_id is not None: - request.aspect_type_id = aspect_type_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_aspect_type(self, - request: Optional[Union[catalog.UpdateAspectTypeRequest, dict]] = None, - *, - aspect_type: Optional[catalog.AspectType] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateAspectTypeRequest, dict]): - The request object. Update AspectType Request - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource - This corresponds to the ``aspect_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
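Since ``update_mask`` is required on the update methods, it may help to see one being built with ``field_mask_pb2``, which this module already imports. A minimal sketch; the resource name and field path are illustrative:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.CatalogServiceClient()

    aspect_type = dataplex_v1.AspectType(
        name="projects/my-project/locations/us-central1/aspectTypes/my-aspect-type",
        description="Refreshed description",
    )

    # Only the fields named in the mask are written; others are left untouched.
    operation = client.update_aspect_type(
        aspect_type=aspect_type,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(operation.result().description)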
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the - JSON-schema for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [aspect_type, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateAspectTypeRequest): - request = catalog.UpdateAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if aspect_type is not None: - request.aspect_type = aspect_type - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("aspect_type.name", request.aspect_type.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.AspectType, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_aspect_type(self, - request: Optional[Union[catalog.DeleteAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAspectTypeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): - The request object. Delete AspectType Request. 
- name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteAspectTypeRequest): - request = catalog.DeleteAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_aspect_types(self, - request: Optional[Union[catalog.ListAspectTypesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAspectTypesPager: - r"""Lists AspectType resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_aspect_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAspectTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_aspect_types(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): - The request object. List AspectTypes request. - parent (str): - Required. The resource name of the AspectType location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: - List AspectTypes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListAspectTypesRequest): - request = catalog.ListAspectTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_aspect_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAspectTypesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
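Because ``list_aspect_types`` returns a pager, additional pages are fetched lazily as iteration proceeds. Below is a sketch of page-wise iteration; ``page_size`` follows the standard List-method paging convention and is an assumption here.

.. code-block:: python

    # Hedged sketch: iterate page by page instead of item by item.
    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    pager = client.list_aspect_types(
        request=dataplex_v1.ListAspectTypesRequest(
            parent="projects/my-project/locations/us-central1",  # hypothetical
            page_size=50,  # assumed standard paging field
        )
    )
    for page in pager.pages:  # each page is a ListAspectTypesResponse
        print(f"fetched {len(page.aspect_types)} aspect types")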
- return response - - def get_aspect_type(self, - request: Optional[Union[catalog.GetAspectTypeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.AspectType: - r"""Gets an AspectType. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAspectTypeRequest( - name="name_value", - ) - - # Make the request - response = client.get_aspect_type(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): - The request object. Get AspectType request. - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.AspectType: - AspectType is a template for creating - Aspects, and represents the JSON-schema - for a given Entry, for example, BigQuery - Table Schema. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetAspectTypeRequest): - request = catalog.GetAspectTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_aspect_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry_group(self, - request: Optional[Union[catalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group: Optional[catalog.EntryGroup] = None, - entry_group_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]): - The request object. Create EntryGroup Request. - parent (str): - Required. The resource name of the entryGroup, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (str): - Required. EntryGroup identifier. - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
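The guard above is why ``request`` and the flattened keyword arguments are mutually exclusive. A sketch of the two equivalent call styles for ``create_entry_group`` (resource names hypothetical); passing both at once raises ``ValueError``.

.. code-block:: python

    # Hedged sketch: the two supported call styles, shown side by side
    # only for comparison.
    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    parent = "projects/my-project/locations/us-central1"  # hypothetical

    # Style 1: a single request object.
    operation = client.create_entry_group(
        request=dataplex_v1.CreateEntryGroupRequest(
            parent=parent,
            entry_group_id="my-group",
        )
    )

    # Style 2: flattened keyword arguments (builds the same request).
    operation = client.create_entry_group(parent=parent, entry_group_id="my-group")

    # Mixing both styles raises ValueError, per the check above.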
- flattened_params = [parent, entry_group, entry_group_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryGroupRequest): - request = catalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group is not None: - request.entry_group = entry_group - if entry_group_id is not None: - request.entry_group_id = entry_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_entry_group(self, - request: Optional[Union[catalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[catalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): - The request object. Update EntryGroup Request. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.EntryGroup` An - Entry Group represents a logical grouping of one or more - Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry_group, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryGroupRequest): - request = catalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.EntryGroup, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_entry_group(self, - request: Optional[Union[catalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): - The request object. Delete EntryGroup Request. - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryGroupRequest): - request = catalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
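``delete_entry_group`` resolves to ``google.protobuf.Empty``, so the only useful signals from the returned future are completion and errors. A minimal sketch with a bounded wait; the resource name is hypothetical.

.. code-block:: python

    # Hedged sketch: wait for a delete LRO with a bounded timeout.
    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    operation = client.delete_entry_group(
        name="projects/my-project/locations/us-central1/entryGroups/my-group",
    )
    operation.result(timeout=300)  # returns Empty; raises on failure or timeout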
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_entry_groups(self, - request: Optional[Union[catalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntryGroupsPager: - r"""Lists EntryGroup resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entry_groups(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntryGroupsRequest, dict]): - The request object. List entryGroups request. - parent (str): - Required. The resource name of the entryGroup location, - of the form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: - List entry groups response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntryGroupsRequest): - request = catalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryGroupsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_group(self, - request: Optional[Union[catalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryGroupRequest, dict]): - The request object. Get EntryGroup request. - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryGroup: - An Entry Group represents a logical - grouping of one or more Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, catalog.GetEntryGroupRequest): - request = catalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry(self, - request: Optional[Union[catalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry: Optional[catalog.Entry] = None, - entry_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Creates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]): - The request object. Create Entry request. - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (str): - Required. Entry identifier. It has to be unique within - an Entry Group. - - Entries corresponding to Google Cloud resources use an - Entry ID format based on `full resource - names `__. - The format is a full resource name of the resource - without the prefix double slashes in the API service - name part of the full resource name. This allows - retrieval of entries using their associated resource - name. - - For example, if the full resource name of a resource is - ``//library.googleapis.com/shelves/shelf1/books/book2``, - then the suggested entry_id is - ``library.googleapis.com/shelves/shelf1/books/book2``. 
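Following the convention above, the suggested ``entry_id`` is the full resource name with the leading double slashes dropped. A minimal sketch:

.. code-block:: python

    # Hedged sketch: derive the suggested entry_id from a full resource name.
    full_resource_name = "//library.googleapis.com/shelves/shelf1/books/book2"
    entry_id = full_resource_name.removeprefix("//")  # Python 3.9+
    assert entry_id == "library.googleapis.com/shelves/shelf1/books/book2"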
- - It is also suggested to follow the same convention for - entries corresponding to resources from providers or - systems other than Google Cloud. - - The maximum size of the field is 4000 characters. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entry, entry_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryRequest): - request = catalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry is not None: - request.entry = entry - if entry_id is not None: - request.entry_id = entry_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_entry(self, - request: Optional[Union[catalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[catalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Updates an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]): - The request object. Update Entry request. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. To update Aspects, - the update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [entry, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.UpdateEntryRequest): - request = catalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
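Per the ``update_mask`` documentation above, aspect changes only take effect when the mask contains ``"aspects"``. A sketch under that assumption; the aspect map key format and resource names are assumptions, not confirmed by this diff.

.. code-block:: python

    # Hedged sketch: update an Entry's aspects. The map key shown is a
    # hypothetical aspect-type reference; check the Aspect docs for the
    # real key format.
    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.CatalogServiceClient()

    entry = dataplex_v1.Entry()
    entry.name = (
        "projects/my-project/locations/us-central1"
        "/entryGroups/my-group/entries/my-entry"  # hypothetical
    )
    entry.aspects["my-project.us-central1.my-aspect-type"] = dataplex_v1.Aspect()

    response = client.update_entry(
        entry=entry,
        update_mask=field_mask_pb2.FieldMask(paths=["aspects"]),  # required for aspects
    )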
- return response - - def delete_entry(self, - request: Optional[Union[catalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Deletes an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - response = client.delete_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]): - The request object. Delete Entry request. - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryRequest): - request = catalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entries(self, - request: Optional[Union[catalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntriesPager: - r"""Lists Entries within an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): - The request object. List Entries request. - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: - List Entries response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListEntriesRequest): - request = catalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. 
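The "add these here" step above is what populates the ``x-goog-request-params`` routing header. A sketch of what ``to_grpc_metadata`` produces; the exact URL-encoding shown in the comment is an assumption.

.. code-block:: python

    # Hedged sketch: the routing header built from the request's parent field.
    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/my-project/locations/us-central1"),)
    )
    print(header)
    # Roughly: ('x-goog-request-params',
    #           'parent=projects%2Fmy-project%2Flocations%2Fus-central1')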
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntriesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry(self, - request: Optional[Union[catalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Gets an Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): - The request object. Get Entry request. - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryRequest): - request = catalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def lookup_entry(self, - request: Optional[Union[catalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.Entry: - r"""Looks up an entry by name using the permission on the - source system. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): - The request object. Lookup Entry request using - permissions in the source system. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.LookupEntryRequest): - request = catalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lookup_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
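Unlike most methods here, ``lookup_entry`` has no flattened parameters, so every field rides on the request object. A sketch requesting a fuller view; the ``view`` field and ``EntryView`` enum are assumptions drawn from the catalog surface, and the names are hypothetical.

.. code-block:: python

    # Hedged sketch: look up an entry through the source-system permission check.
    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    response = client.lookup_entry(
        request=dataplex_v1.LookupEntryRequest(
            name="projects/my-project/locations/us-central1",  # scope (hypothetical)
            entry=(
                "projects/my-project/locations/us-central1"
                "/entryGroups/my-group/entries/my-entry"
            ),
            view=dataplex_v1.EntryView.FULL,  # assumed enum
        )
    )
    print(response.name)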
- return response
-
- def search_entries(self,
- request: Optional[Union[catalog.SearchEntriesRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- query: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.SearchEntriesPager:
- r"""Searches for Entries matching the given query and
- scope.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_search_entries():
- # Create a client
- client = dataplex_v1.CatalogServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.SearchEntriesRequest(
- name="name_value",
- query="query_value",
- )
-
- # Make the request
- page_result = client.search_entries(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.SearchEntriesRequest, dict]):
- The request object.
- name (str):
- Required. The project to which the request should be
- attributed in the following form:
- ``projects/{project}/locations/global``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- query (str):
- Required. The query against which entries in scope
- should be matched. The query syntax is defined in
- `Search syntax for Dataplex Universal
- Catalog <https://cloud.google.com/dataplex/docs/search-syntax>`__.
-
- This corresponds to the ``query`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager:
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name, query]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, catalog.SearchEntriesRequest):
- request = catalog.SearchEntriesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None: - request.name = name - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchEntriesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_metadata_job(self, - request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - metadata_job: Optional[catalog.MetadataJob] = None, - metadata_job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a metadata job. For example, use a metadata - job to import metadata from a third-party system into - Dataplex Universal Catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "NONE" - metadata_job.import_spec.aspect_sync_mode = "NONE" - metadata_job.type_ = "EXPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): - The request object. Create metadata job request. - parent (str): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job (google.cloud.dataplex_v1.types.MetadataJob): - Required. The metadata job resource. - This corresponds to the ``metadata_job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - metadata_job_id (str): - Optional. The metadata job ID. 
If not provided, a unique - ID is generated with the prefix ``metadata-job-``. - - This corresponds to the ``metadata_job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.MetadataJob` A - metadata job resource. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, metadata_job, metadata_job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateMetadataJobRequest): - request = catalog.CreateMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if metadata_job is not None: - request.metadata_job = metadata_job - if metadata_job_id is not None: - request.metadata_job_id = metadata_job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - catalog.MetadataJob, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_metadata_job(self, - request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.MetadataJob: - r"""Gets a metadata job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetMetadataJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_metadata_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): - The request object. Get metadata job request. - name (str): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.MetadataJob: - A metadata job resource. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetMetadataJobRequest): - request = catalog.GetMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_metadata_jobs(self, - request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMetadataJobsPager: - r"""Lists metadata jobs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): - The request object. List metadata jobs request. - parent (str): - Required. The resource name of the parent location, in - the format - ``projects/{project_id_or_number}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: - List metadata jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.ListMetadataJobsRequest): - request = catalog.ListMetadataJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMetadataJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
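The object returned above is a pager rather than a raw response: iterating it yields ``MetadataJob`` messages and transparently issues follow-up requests, while its ``pages`` property yields whole ``ListMetadataJobsResponse`` pages. A short sketch, assuming a placeholder parent path:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # Item-wise iteration; additional pages are fetched on demand.
    for job in client.list_metadata_jobs(parent=parent):
        print(job.name, job.status.state)

    # Page-wise iteration, for access to per-response fields such as
    # ``next_page_token``.
    for page in client.list_metadata_jobs(parent=parent).pages:
        print(len(page.metadata_jobs))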
- return response - - def cancel_metadata_job(self, - request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_metadata_job(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): - The request object. Cancel metadata job request. - name (str): - Required. The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CancelMetadataJobRequest): - request = catalog.CancelMetadataJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_entry_link(self, - request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_link: Optional[catalog.EntryLink] = None, - entry_link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Creates an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry_link = dataplex_v1.EntryLink() - entry_link.entry_link_type = "entry_link_type_value" - entry_link.entry_references.name = "name_value" - entry_link.entry_references.type_ = "TARGET" - - request = dataplex_v1.CreateEntryLinkRequest( - parent="parent_value", - entry_link_id="entry_link_id_value", - entry_link=entry_link, - ) - - # Make the request - response = client.create_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]): - The request object. Request message for CreateEntryLink. - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_link (google.cloud.dataplex_v1.types.EntryLink): - Required. Entry Link resource. - This corresponds to the ``entry_link`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_link_id (str): - Required. Entry Link identifier - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the EntryGroup. - - This corresponds to the ``entry_link_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, entry_link, entry_link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.CreateEntryLinkRequest): - request = catalog.CreateEntryLinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_link is not None: - request.entry_link = entry_link - if entry_link_id is not None: - request.entry_link_id = entry_link_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entry_link(self, - request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Deletes an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntryLinkRequest( - name="name_value", - ) - - # Make the request - response = client.delete_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]): - The request object. Request message for DeleteEntryLink. - name (str): - Required. The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.DeleteEntryLinkRequest): - request = catalog.DeleteEntryLinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_link(self, - request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> catalog.EntryLink: - r"""Gets an Entry Link. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entry_link(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntryLinkRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_link(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]): - The request object. Request message for GetEntryLink. - name (str): - Required. The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EntryLink: - EntryLink represents a link between - two Entries. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, catalog.GetEntryLinkRequest): - request = catalog.GetEntryLinkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_link] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CatalogServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
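Unlike the service methods, the operations and locations mixins above take plain protobuf requests (``operations_pb2``/``locations_pb2``) rather than proto-plus types, which is why a ``dict`` is expanded into keyword arguments instead of being wrapped. A brief sketch, with placeholder resource names:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    # A dict is coerced via keyword expansion into a
    # locations_pb2.ListLocationsRequest.
    for loc in client.list_locations({"name": "projects/my-project"}).locations:
        print(loc.location_id)

    # The same convention applies to long-running operations.
    op = client.get_operation(
        {"name": "projects/my-project/locations/us-central1/operations/op-123"}
    )
    print(op.done)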
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "CatalogServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py deleted file mode 100644 index 026fca877ecd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/pagers.py +++ /dev/null @@ -1,861 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import catalog - - -class ListEntryTypesPager: - """A pager for iterating through ``list_entry_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryTypes`` requests and continue to iterate - through the ``entry_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListEntryTypesResponse], - request: catalog.ListEntryTypesRequest, - response: catalog.ListEntryTypesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = catalog.ListEntryTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntryTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.EntryType]: - for page in self.pages: - yield from page.entry_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryTypesAsyncPager: - """A pager for iterating through ``list_entry_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryTypes`` requests and continue to iterate - through the ``entry_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntryTypesResponse]], - request: catalog.ListEntryTypesRequest, - response: catalog.ListEntryTypesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryTypesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListEntryTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntryTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.EntryType]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAspectTypesPager: - """A pager for iterating through ``list_aspect_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``aspect_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAspectTypes`` requests and continue to iterate - through the ``aspect_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListAspectTypesResponse], - request: catalog.ListAspectTypesRequest, - response: catalog.ListAspectTypesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListAspectTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListAspectTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.AspectType]: - for page in self.pages: - yield from page.aspect_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAspectTypesAsyncPager: - """A pager for iterating through ``list_aspect_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``aspect_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAspectTypes`` requests and continue to iterate - through the ``aspect_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAspectTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListAspectTypesResponse]], - request: catalog.ListAspectTypesRequest, - response: catalog.ListAspectTypesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAspectTypesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAspectTypesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListAspectTypesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListAspectTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.AspectType]: - async def async_generator(): - async for page in self.pages: - for response in page.aspect_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListEntryGroupsResponse], - request: catalog.ListEntryGroupsRequest, - response: catalog.ListEntryGroupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListEntryGroupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.EntryGroup]: - for page in self.pages: - yield from page.entry_groups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsAsyncPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntryGroupsResponse]], - request: catalog.ListEntryGroupsRequest, - response: catalog.ListEntryGroupsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntryGroupsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListEntryGroupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.EntryGroup]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_groups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListEntriesResponse], - request: catalog.ListEntriesRequest, - response: catalog.ListEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntriesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.Entry]: - for page in self.pages: - yield from page.entries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesAsyncPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListEntriesResponse]], - request: catalog.ListEntriesRequest, - response: catalog.ListEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntriesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.Entry]: - async def async_generator(): - async for page in self.pages: - for response in page.entries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchEntriesPager: - """A pager for iterating through ``search_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchEntries`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.SearchEntriesResponse], - request: catalog.SearchEntriesRequest, - response: catalog.SearchEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.SearchEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.SearchEntriesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.SearchEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.SearchEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.SearchEntriesResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchEntriesAsyncPager: - """A pager for iterating through ``search_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchEntries`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.SearchEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.SearchEntriesResponse]], - request: catalog.SearchEntriesRequest, - response: catalog.SearchEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.SearchEntriesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.SearchEntriesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.SearchEntriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.SearchEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.SearchEntriesResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetadataJobsPager: - """A pager for iterating through ``list_metadata_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``metadata_jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMetadataJobs`` requests and continue to iterate - through the ``metadata_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., catalog.ListMetadataJobsResponse], - request: catalog.ListMetadataJobsRequest, - response: catalog.ListMetadataJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListMetadataJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[catalog.MetadataJob]: - for page in self.pages: - yield from page.metadata_jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMetadataJobsAsyncPager: - """A pager for iterating through ``list_metadata_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``metadata_jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMetadataJobs`` requests and continue to iterate - through the ``metadata_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], - request: catalog.ListMetadataJobsRequest, - response: catalog.ListMetadataJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = catalog.ListMetadataJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: - async def async_generator(): - async for page in self.pages: - for response in page.metadata_jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst deleted file mode 100644 index c14dcbeef235..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`CatalogServiceTransport` is the ABC for all transports. -- public child `CatalogServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `CatalogServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseCatalogServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `CatalogServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py deleted file mode 100644 index 8ee5c702fdcc..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CatalogServiceTransport -from .grpc import CatalogServiceGrpcTransport -from .grpc_asyncio import CatalogServiceGrpcAsyncIOTransport -from .rest import CatalogServiceRestTransport -from .rest import CatalogServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]] -_transport_registry['grpc'] = CatalogServiceGrpcTransport -_transport_registry['grpc_asyncio'] = CatalogServiceGrpcAsyncIOTransport -_transport_registry['rest'] = CatalogServiceRestTransport - -__all__ = ( - 'CatalogServiceTransport', - 'CatalogServiceGrpcTransport', - 'CatalogServiceGrpcAsyncIOTransport', - 'CatalogServiceRestTransport', - 'CatalogServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py deleted file mode 100644 index 501523591741..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ /dev/null @@ -1,754 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import catalog -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class CatalogServiceTransport(abc.ABC): - """Abstract transport class for CatalogService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
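Credential resolution in the base transport enforces that `credentials` and `credentials_file` are mutually exclusive, then falls back to `google.auth.default()`. The two common entry points, with a placeholder key path:

    from google.cloud import dataplex_v1
    from google.oauth2 import service_account

    # Explicit credentials object; mutually exclusive with credentials_file.
    creds = service_account.Credentials.from_service_account_file(
        "/path/to/key.json"  # placeholder path
    )
    client = dataplex_v1.CatalogServiceClient(credentials=creds)

    # No credentials at all: google.auth.default() resolves them from the
    # environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud ADC, metadata server).
    client = dataplex_v1.CatalogServiceClient()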
- self._wrapped_methods = { - self.create_entry_type: gapic_v1.method.wrap_method( - self.create_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_type: gapic_v1.method.wrap_method( - self.update_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_type: gapic_v1.method.wrap_method( - self.delete_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_types: gapic_v1.method.wrap_method( - self.list_entry_types, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_type: gapic_v1.method.wrap_method( - self.get_entry_type, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_aspect_type: gapic_v1.method.wrap_method( - self.create_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_aspect_type: gapic_v1.method.wrap_method( - self.update_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_aspect_type: gapic_v1.method.wrap_method( - self.delete_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_aspect_types: gapic_v1.method.wrap_method( - self.list_aspect_types, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_aspect_type: gapic_v1.method.wrap_method( - self.get_aspect_type, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_group: gapic_v1.method.wrap_method( - self.delete_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_groups: gapic_v1.method.wrap_method( - self.list_entry_groups, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_group: gapic_v1.method.wrap_method( - self.get_entry_group, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry: 
gapic_v1.method.wrap_method( - self.update_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry: gapic_v1.method.wrap_method( - self.delete_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entries: gapic_v1.method.wrap_method( - self.list_entries, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_entry: gapic_v1.method.wrap_method( - self.get_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.lookup_entry: gapic_v1.method.wrap_method( - self.lookup_entry, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.search_entries: gapic_v1.method.wrap_method( - self.search_entries, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_metadata_job: gapic_v1.method.wrap_method( - self.create_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_metadata_job: gapic_v1.method.wrap_method( - self.get_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.list_metadata_jobs: gapic_v1.method.wrap_method( - self.list_metadata_jobs, - default_timeout=None, - client_info=client_info, - ), - self.cancel_metadata_job: gapic_v1.method.wrap_method( - self.cancel_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.create_entry_link: gapic_v1.method.wrap_method( - self.create_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry_link: gapic_v1.method.wrap_method( - self.delete_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.get_entry_link: gapic_v1.method.wrap_method( - self.get_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - 
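The method table above gives the read RPCs an exponential-backoff retry (1.0 s initial, 1.3x multiplier, 10 s cap) limited to `ResourceExhausted` and `ServiceUnavailable`. Callers can override it per call; a sketch with a longer overall budget, placeholder parent as before:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import dataplex_v1

    # Same shape as the defaults above, with a larger retry budget.
    custom_retry = retries.Retry(
        initial=1.0,      # first backoff, seconds
        maximum=10.0,     # backoff ceiling
        multiplier=1.3,   # exponential growth factor
        predicate=retries.if_exception_type(
            core_exceptions.ResourceExhausted,
            core_exceptions.ServiceUnavailable,
        ),
        timeout=120.0,    # overall retry budget
    )

    client = dataplex_v1.CatalogServiceClient()
    pager = client.list_entry_types(
        request=dataplex_v1.ListEntryTypesRequest(
            parent="projects/my-project/locations/us-central1",  # placeholder
        ),
        retry=custom_retry,
        timeout=60.0,
    )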
default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - Union[ - catalog.ListEntryTypesResponse, - Awaitable[catalog.ListEntryTypesResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - Union[ - catalog.EntryType, - Awaitable[catalog.EntryType] - ]]: - raise NotImplementedError() - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - Union[ - catalog.ListAspectTypesResponse, - Awaitable[catalog.ListAspectTypesResponse] - ]]: - raise NotImplementedError() - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - Union[ - catalog.AspectType, - Awaitable[catalog.AspectType] - ]]: - raise NotImplementedError() - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - Union[ - catalog.ListEntryGroupsResponse, - Awaitable[catalog.ListEntryGroupsResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - Union[ - catalog.EntryGroup, - Awaitable[catalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def create_entry(self) -> 
Callable[ - [catalog.CreateEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - Union[ - catalog.ListEntriesResponse, - Awaitable[catalog.ListEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - Union[ - catalog.Entry, - Awaitable[catalog.Entry] - ]]: - raise NotImplementedError() - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - Union[ - catalog.SearchEntriesResponse, - Awaitable[catalog.SearchEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - Union[ - catalog.MetadataJob, - Awaitable[catalog.MetadataJob] - ]]: - raise NotImplementedError() - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - Union[ - catalog.ListMetadataJobsResponse, - Awaitable[catalog.ListMetadataJobsResponse] - ]]: - raise NotImplementedError() - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_entry_link(self) -> Callable[ - [catalog.CreateEntryLinkRequest], - Union[ - catalog.EntryLink, - Awaitable[catalog.EntryLink] - ]]: - raise NotImplementedError() - - @property - def delete_entry_link(self) -> Callable[ - [catalog.DeleteEntryLinkRequest], - Union[ - catalog.EntryLink, - Awaitable[catalog.EntryLink] - ]]: - raise NotImplementedError() - - @property - def get_entry_link(self) -> Callable[ - [catalog.GetEntryLinkRequest], - Union[ - catalog.EntryLink, - Awaitable[catalog.EntryLink] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - 
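Each RPC on the ABC is a property returning a callable that defaults to `raise NotImplementedError()`; typing the results as `Union[T, Awaitable[T]]` is what lets the sync and async transports share one interface. The shape in miniature, with hypothetical names:

    import abc
    from typing import Callable


    class FakeTransport(abc.ABC):
        """Mirrors the property-per-RPC shape of the ABC (hypothetical)."""

        @property
        def get_widget(self) -> Callable[[str], str]:
            raise NotImplementedError()


    class InMemoryTransport(FakeTransport):
        @property
        def get_widget(self) -> Callable[[str], str]:
            # A real transport returns a callable bound to its channel; a
            # closure over local state keeps the sketch self-contained.
            store = {"w1": "widget one"}
            return lambda name: store[name]


    transport = InMemoryTransport()
    print(transport.get_widget("w1"))  # "widget one"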
@property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'CatalogServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py deleted file mode 100644 index 108a93fed235..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ /dev/null @@ -1,1220 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import catalog -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for 
{client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class CatalogServiceGrpcTransport(CatalogServiceTransport): - """gRPC backend transport for CatalogService. - - The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. - They collectively let data administrators organize, manage, - secure, and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. 
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
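Most of the `__init__` branching deals with channel provenance: an explicit `channel` instance wins and disables credential handling, otherwise the mTLS inputs shape the SSL credentials. The two common call shapes, with placeholder endpoint and cert paths (the mTLS variant still resolves default credentials, so it assumes ADC is configured):

    import grpc

    from google.cloud.dataplex_v1.services.catalog_service.transports import (
        CatalogServiceGrpcTransport,
    )

    # 1) Supply a ready-made channel; credential handling is skipped entirely.
    channel = grpc.insecure_channel("localhost:8080")  # e.g. a local emulator
    transport = CatalogServiceGrpcTransport(channel=channel)


    # 2) Mutual TLS via a callback returning (cert_bytes, key_bytes).
    def load_client_cert():
        with open("client.pem", "rb") as f:  # placeholder paths
            cert = f.read()
        with open("client.key", "rb") as f:
            key = f.read()
        return cert, key


    mtls_transport = CatalogServiceGrpcTransport(
        client_cert_source_for_mtls=load_client_cert,
    )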
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
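`create_channel` is also the hook behind the callable form of the `channel` argument: a callable defers channel construction until after credentials are resolved, and is then invoked with the transport's saved state. A sketch, assuming default credentials are available:

    from google.cloud.dataplex_v1.services.catalog_service.transports import (
        CatalogServiceGrpcTransport,
    )


    def channel_factory(host, **kwargs):
        # Invoked in place of self.create_channel after credentials are
        # resolved; kwargs carry credentials, scopes, ssl_credentials,
        # quota_project_id and the channel options.
        print(f"building channel for {host}")
        return CatalogServiceGrpcTransport.create_channel(host, **kwargs)


    transport = CatalogServiceGrpcTransport(channel=channel_factory)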
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create entry type method over gRPC. - - Creates an EntryType. - - Returns: - Callable[[~.CreateEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_type' not in self._stubs: - self._stubs['create_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', - request_serializer=catalog.CreateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_type'] - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update entry type method over gRPC. - - Updates an EntryType. - - Returns: - Callable[[~.UpdateEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_type' not in self._stubs: - self._stubs['update_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', - request_serializer=catalog.UpdateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_type'] - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete entry type method over gRPC. - - Deletes an EntryType. - - Returns: - Callable[[~.DeleteEntryTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_type' not in self._stubs: - self._stubs['delete_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', - request_serializer=catalog.DeleteEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_type'] - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - catalog.ListEntryTypesResponse]: - r"""Return a callable for the list entry types method over gRPC. - - Lists EntryType resources in a project and location. 
- - Returns: - Callable[[~.ListEntryTypesRequest], - ~.ListEntryTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_types' not in self._stubs: - self._stubs['list_entry_types'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', - request_serializer=catalog.ListEntryTypesRequest.serialize, - response_deserializer=catalog.ListEntryTypesResponse.deserialize, - ) - return self._stubs['list_entry_types'] - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - catalog.EntryType]: - r"""Return a callable for the get entry type method over gRPC. - - Gets an EntryType. - - Returns: - Callable[[~.GetEntryTypeRequest], - ~.EntryType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_type' not in self._stubs: - self._stubs['get_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryType', - request_serializer=catalog.GetEntryTypeRequest.serialize, - response_deserializer=catalog.EntryType.deserialize, - ) - return self._stubs['get_entry_type'] - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create aspect type method over gRPC. - - Creates an AspectType. - - Returns: - Callable[[~.CreateAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_aspect_type' not in self._stubs: - self._stubs['create_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', - request_serializer=catalog.CreateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_aspect_type'] - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update aspect type method over gRPC. - - Updates an AspectType. - - Returns: - Callable[[~.UpdateAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_aspect_type' not in self._stubs: - self._stubs['update_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', - request_serializer=catalog.UpdateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_aspect_type'] - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete aspect type method over gRPC. - - Deletes an AspectType. - - Returns: - Callable[[~.DeleteAspectTypeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_aspect_type' not in self._stubs: - self._stubs['delete_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', - request_serializer=catalog.DeleteAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_aspect_type'] - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - catalog.ListAspectTypesResponse]: - r"""Return a callable for the list aspect types method over gRPC. - - Lists AspectType resources in a project and location. - - Returns: - Callable[[~.ListAspectTypesRequest], - ~.ListAspectTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_aspect_types' not in self._stubs: - self._stubs['list_aspect_types'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', - request_serializer=catalog.ListAspectTypesRequest.serialize, - response_deserializer=catalog.ListAspectTypesResponse.deserialize, - ) - return self._stubs['list_aspect_types'] - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - catalog.AspectType]: - r"""Return a callable for the get aspect type method over gRPC. - - Gets an AspectType. - - Returns: - Callable[[~.GetAspectTypeRequest], - ~.AspectType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_aspect_type' not in self._stubs: - self._stubs['get_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetAspectType', - request_serializer=catalog.GetAspectTypeRequest.serialize, - response_deserializer=catalog.AspectType.deserialize, - ) - return self._stubs['get_aspect_type'] - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an EntryGroup. - - Returns: - Callable[[~.CreateEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
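-
- Example (an illustrative sketch, not generated code; resource names are
- hypothetical placeholders). The returned ``Operation`` can be polled
- with ``operations_client``:
-
- operation = transport.create_entry_group(catalog.CreateEntryGroupRequest(
- parent='projects/my-project/locations/us-central1',
- entry_group_id='my-group',
- ))
- operation = transport.operations_client.get_operation(operation.name)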
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', - request_serializer=catalog.CreateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', - request_serializer=catalog.UpdateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. - - Returns: - Callable[[~.DeleteEntryGroupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', - request_serializer=catalog.DeleteEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - catalog.ListEntryGroupsResponse]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists EntryGroup resources in a project and location. - - Returns: - Callable[[~.ListEntryGroupsRequest], - ~.ListEntryGroupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', - request_serializer=catalog.ListEntryGroupsRequest.serialize, - response_deserializer=catalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - catalog.EntryGroup]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. 
- - Returns: - Callable[[~.GetEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', - request_serializer=catalog.GetEntryGroupRequest.serialize, - response_deserializer=catalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - catalog.Entry]: - r"""Return a callable for the create entry method over gRPC. - - Creates an Entry. - - Returns: - Callable[[~.CreateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntry', - request_serializer=catalog.CreateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - catalog.Entry]: - r"""Return a callable for the update entry method over gRPC. - - Updates an Entry. - - Returns: - Callable[[~.UpdateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', - request_serializer=catalog.UpdateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - catalog.Entry]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an Entry. - - Returns: - Callable[[~.DeleteEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', - request_serializer=catalog.DeleteEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['delete_entry'] - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - catalog.ListEntriesResponse]: - r"""Return a callable for the list entries method over gRPC. - - Lists Entries within an EntryGroup. 
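-
- Example (an illustrative sketch, not generated code; the parent
- EntryGroup name is a hypothetical placeholder):
-
- response = transport.list_entries(catalog.ListEntriesRequest(
- parent='projects/my-project/locations/us-central1/entryGroups/my-group',
- ))
- for entry in response.entries:
- print(entry.name)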
- - Returns: - Callable[[~.ListEntriesRequest], - ~.ListEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntries', - request_serializer=catalog.ListEntriesRequest.serialize, - response_deserializer=catalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - catalog.Entry]: - r"""Return a callable for the get entry method over gRPC. - - Gets an Entry. - - Returns: - Callable[[~.GetEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntry', - request_serializer=catalog.GetEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - catalog.Entry]: - r"""Return a callable for the lookup entry method over gRPC. - - Looks up an entry by name using the permission on the - source system. - - Returns: - Callable[[~.LookupEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/LookupEntry', - request_serializer=catalog.LookupEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - catalog.SearchEntriesResponse]: - r"""Return a callable for the search entries method over gRPC. - - Searches for Entries matching the given query and - scope. - - Returns: - Callable[[~.SearchEntriesRequest], - ~.SearchEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_entries' not in self._stubs: - self._stubs['search_entries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/SearchEntries', - request_serializer=catalog.SearchEntriesRequest.serialize, - response_deserializer=catalog.SearchEntriesResponse.deserialize, - ) - return self._stubs['search_entries'] - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the create metadata job method over gRPC. - - Creates a metadata job. 
For example, use a metadata - job to import metadata from a third-party system into - Dataplex Universal Catalog. - - Returns: - Callable[[~.CreateMetadataJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metadata_job' not in self._stubs: - self._stubs['create_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', - request_serializer=catalog.CreateMetadataJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_metadata_job'] - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - catalog.MetadataJob]: - r"""Return a callable for the get metadata job method over gRPC. - - Gets a metadata job. - - Returns: - Callable[[~.GetMetadataJobRequest], - ~.MetadataJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_metadata_job' not in self._stubs: - self._stubs['get_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', - request_serializer=catalog.GetMetadataJobRequest.serialize, - response_deserializer=catalog.MetadataJob.deserialize, - ) - return self._stubs['get_metadata_job'] - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - catalog.ListMetadataJobsResponse]: - r"""Return a callable for the list metadata jobs method over gRPC. - - Lists metadata jobs. - - Returns: - Callable[[~.ListMetadataJobsRequest], - ~.ListMetadataJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_metadata_jobs' not in self._stubs: - self._stubs['list_metadata_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', - request_serializer=catalog.ListMetadataJobsRequest.serialize, - response_deserializer=catalog.ListMetadataJobsResponse.deserialize, - ) - return self._stubs['list_metadata_jobs'] - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel metadata job method over gRPC. - - Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - Returns: - Callable[[~.CancelMetadataJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
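- # Illustrative sketch (hypothetical job name), not generated code: a
- # caller could cancel an in-progress import and then run a corrective
- # job, per the warning in the docstring above:
- #   transport.cancel_metadata_job(catalog.CancelMetadataJobRequest(
- #       name='projects/my-project/locations/us-central1/metadataJobs/my-job'))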
- if 'cancel_metadata_job' not in self._stubs: - self._stubs['cancel_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', - request_serializer=catalog.CancelMetadataJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_metadata_job'] - - @property - def create_entry_link(self) -> Callable[ - [catalog.CreateEntryLinkRequest], - catalog.EntryLink]: - r"""Return a callable for the create entry link method over gRPC. - - Creates an Entry Link. - - Returns: - Callable[[~.CreateEntryLinkRequest], - ~.EntryLink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_link' not in self._stubs: - self._stubs['create_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryLink', - request_serializer=catalog.CreateEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['create_entry_link'] - - @property - def delete_entry_link(self) -> Callable[ - [catalog.DeleteEntryLinkRequest], - catalog.EntryLink]: - r"""Return a callable for the delete entry link method over gRPC. - - Deletes an Entry Link. - - Returns: - Callable[[~.DeleteEntryLinkRequest], - ~.EntryLink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_link' not in self._stubs: - self._stubs['delete_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink', - request_serializer=catalog.DeleteEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['delete_entry_link'] - - @property - def get_entry_link(self) -> Callable[ - [catalog.GetEntryLinkRequest], - catalog.EntryLink]: - r"""Return a callable for the get entry link method over gRPC. - - Gets an Entry Link. - - Returns: - Callable[[~.GetEntryLinkRequest], - ~.EntryLink]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_link' not in self._stubs: - self._stubs['get_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryLink', - request_serializer=catalog.GetEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['get_entry_link'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'CatalogServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py deleted file mode 100644 index 1a45c31c78a0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1521 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import catalog -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import CatalogServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, 
google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.dataplex.v1.CatalogService",
- "rpcName": str(client_call_details.method),
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
- response = await continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = await response.trailing_metadata()
- # Convert gRPC response metadata to a dict of strings
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = await response
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response to rpc {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.dataplex.v1.CatalogService",
- "rpcName": str(client_call_details.method),
- "response": grpc_response,
- "metadata": grpc_response["metadata"],
- },
- )
- return response
-
-
- class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
- """gRPC AsyncIO backend transport for CatalogService.
-
- The primary resources offered by this service are
- EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks.
- They collectively let data administrators organize, manage,
- secure, and catalog data located across cloud projects in their
- organization in a variety of storage systems, including Cloud
- Storage and BigQuery.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'dataplex.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'dataplex.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'dataplex.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
- A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
- self._grpc_channel = None
- self._ssl_channel_credentials = ssl_channel_credentials
- self._stubs: Dict[str, Callable] = {}
- self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
- if api_mtls_endpoint:
- warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
- if client_cert_source:
- warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
- if isinstance(channel, aio.Channel):
- # Ignore credentials if a channel was passed.
- credentials = None
- self._ignore_credentials = True
- # If a channel was explicitly provided, set it.
- self._grpc_channel = channel
- self._ssl_channel_credentials = None
- else:
- if api_mtls_endpoint:
- host = api_mtls_endpoint
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
- else:
- if client_cert_source_for_mtls and not ssl_channel_credentials:
- cert, key = client_cert_source_for_mtls()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
-
- # The base transport sets the host, credentials and scopes
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes,
- quota_project_id=quota_project_id,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience,
- )
-
- if not self._grpc_channel:
- # initialize with the provided callable or the default channel
- channel_init = channel or type(self).create_channel
- self._grpc_channel = channel_init(
- self._host,
- # use the credentials which are saved
- credentials=self._credentials,
- # Set ``credentials_file`` to ``None`` here as
- # the credentials that we saved earlier should be used.
- credentials_file=None,
- scopes=self._scopes,
- ssl_credentials=self._ssl_channel_credentials,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- self._interceptor = _LoggingClientAIOInterceptor()
- self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
- self._logged_channel = self._grpc_channel
- self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
- # Wrap messages. This must be done after self._logged_channel exists
- self._prep_wrapped_messages(client_info)
-
- @property
- def grpc_channel(self) -> aio.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
- """
- # Return the channel from cache.
- return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create entry type method over gRPC. - - Creates an EntryType. - - Returns: - Callable[[~.CreateEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_type' not in self._stubs: - self._stubs['create_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryType', - request_serializer=catalog.CreateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_type'] - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update entry type method over gRPC. - - Updates an EntryType. - - Returns: - Callable[[~.UpdateEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_type' not in self._stubs: - self._stubs['update_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryType', - request_serializer=catalog.UpdateEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_type'] - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete entry type method over gRPC. - - Deletes an EntryType. - - Returns: - Callable[[~.DeleteEntryTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_type' not in self._stubs: - self._stubs['delete_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryType', - request_serializer=catalog.DeleteEntryTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_type'] - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - Awaitable[catalog.ListEntryTypesResponse]]: - r"""Return a callable for the list entry types method over gRPC. 
- - Lists EntryType resources in a project and location. - - Returns: - Callable[[~.ListEntryTypesRequest], - Awaitable[~.ListEntryTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_types' not in self._stubs: - self._stubs['list_entry_types'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryTypes', - request_serializer=catalog.ListEntryTypesRequest.serialize, - response_deserializer=catalog.ListEntryTypesResponse.deserialize, - ) - return self._stubs['list_entry_types'] - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - Awaitable[catalog.EntryType]]: - r"""Return a callable for the get entry type method over gRPC. - - Gets an EntryType. - - Returns: - Callable[[~.GetEntryTypeRequest], - Awaitable[~.EntryType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_type' not in self._stubs: - self._stubs['get_entry_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryType', - request_serializer=catalog.GetEntryTypeRequest.serialize, - response_deserializer=catalog.EntryType.deserialize, - ) - return self._stubs['get_entry_type'] - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create aspect type method over gRPC. - - Creates an AspectType. - - Returns: - Callable[[~.CreateAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_aspect_type' not in self._stubs: - self._stubs['create_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateAspectType', - request_serializer=catalog.CreateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_aspect_type'] - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update aspect type method over gRPC. - - Updates an AspectType. - - Returns: - Callable[[~.UpdateAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_aspect_type' not in self._stubs: - self._stubs['update_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateAspectType', - request_serializer=catalog.UpdateAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_aspect_type'] - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete aspect type method over gRPC. - - Deletes an AspectType. - - Returns: - Callable[[~.DeleteAspectTypeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_aspect_type' not in self._stubs: - self._stubs['delete_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteAspectType', - request_serializer=catalog.DeleteAspectTypeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_aspect_type'] - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - Awaitable[catalog.ListAspectTypesResponse]]: - r"""Return a callable for the list aspect types method over gRPC. - - Lists AspectType resources in a project and location. - - Returns: - Callable[[~.ListAspectTypesRequest], - Awaitable[~.ListAspectTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_aspect_types' not in self._stubs: - self._stubs['list_aspect_types'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListAspectTypes', - request_serializer=catalog.ListAspectTypesRequest.serialize, - response_deserializer=catalog.ListAspectTypesResponse.deserialize, - ) - return self._stubs['list_aspect_types'] - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - Awaitable[catalog.AspectType]]: - r"""Return a callable for the get aspect type method over gRPC. - - Gets an AspectType. - - Returns: - Callable[[~.GetAspectTypeRequest], - Awaitable[~.AspectType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_aspect_type' not in self._stubs: - self._stubs['get_aspect_type'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetAspectType', - request_serializer=catalog.GetAspectTypeRequest.serialize, - response_deserializer=catalog.AspectType.deserialize, - ) - return self._stubs['get_aspect_type'] - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an EntryGroup. 
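-
- Example (an illustrative sketch, not generated code; names are
- hypothetical placeholders). The returned callable yields an awaitable,
- so it must be used inside a running event loop:
-
- operation = await transport.create_entry_group(catalog.CreateEntryGroupRequest(
- parent='projects/my-project/locations/us-central1',
- entry_group_id='my-group',
- ))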
- - Returns: - Callable[[~.CreateEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryGroup', - request_serializer=catalog.CreateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntryGroup', - request_serializer=catalog.UpdateEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. - - Returns: - Callable[[~.DeleteEntryGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryGroup', - request_serializer=catalog.DeleteEntryGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - Awaitable[catalog.ListEntryGroupsResponse]]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists EntryGroup resources in a project and location. - - Returns: - Callable[[~.ListEntryGroupsRequest], - Awaitable[~.ListEntryGroupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntryGroups', - request_serializer=catalog.ListEntryGroupsRequest.serialize, - response_deserializer=catalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - Awaitable[catalog.EntryGroup]]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. - - Returns: - Callable[[~.GetEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryGroup', - request_serializer=catalog.GetEntryGroupRequest.serialize, - response_deserializer=catalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the create entry method over gRPC. - - Creates an Entry. - - Returns: - Callable[[~.CreateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntry', - request_serializer=catalog.CreateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the update entry method over gRPC. - - Updates an Entry. - - Returns: - Callable[[~.UpdateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/UpdateEntry', - request_serializer=catalog.UpdateEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an Entry. - - Returns: - Callable[[~.DeleteEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntry', - request_serializer=catalog.DeleteEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['delete_entry'] - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - Awaitable[catalog.ListEntriesResponse]]: - r"""Return a callable for the list entries method over gRPC. - - Lists Entries within an EntryGroup. - - Returns: - Callable[[~.ListEntriesRequest], - Awaitable[~.ListEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListEntries', - request_serializer=catalog.ListEntriesRequest.serialize, - response_deserializer=catalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the get entry method over gRPC. - - Gets an Entry. - - Returns: - Callable[[~.GetEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntry', - request_serializer=catalog.GetEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - Awaitable[catalog.Entry]]: - r"""Return a callable for the lookup entry method over gRPC. - - Looks up an entry by name using the permission on the - source system. - - Returns: - Callable[[~.LookupEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/LookupEntry', - request_serializer=catalog.LookupEntryRequest.serialize, - response_deserializer=catalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - Awaitable[catalog.SearchEntriesResponse]]: - r"""Return a callable for the search entries method over gRPC. - - Searches for Entries matching the given query and - scope. - - Returns: - Callable[[~.SearchEntriesRequest], - Awaitable[~.SearchEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_entries' not in self._stubs: - self._stubs['search_entries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/SearchEntries', - request_serializer=catalog.SearchEntriesRequest.serialize, - response_deserializer=catalog.SearchEntriesResponse.deserialize, - ) - return self._stubs['search_entries'] - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create metadata job method over gRPC. - - Creates a metadata job. For example, use a metadata - job to import metadata from a third-party system into - Dataplex Universal Catalog. - - Returns: - Callable[[~.CreateMetadataJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_metadata_job' not in self._stubs: - self._stubs['create_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob', - request_serializer=catalog.CreateMetadataJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_metadata_job'] - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - Awaitable[catalog.MetadataJob]]: - r"""Return a callable for the get metadata job method over gRPC. - - Gets a metadata job. - - Returns: - Callable[[~.GetMetadataJobRequest], - Awaitable[~.MetadataJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_metadata_job' not in self._stubs: - self._stubs['get_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetMetadataJob', - request_serializer=catalog.GetMetadataJobRequest.serialize, - response_deserializer=catalog.MetadataJob.deserialize, - ) - return self._stubs['get_metadata_job'] - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - Awaitable[catalog.ListMetadataJobsResponse]]: - r"""Return a callable for the list metadata jobs method over gRPC. - - Lists metadata jobs. - - Returns: - Callable[[~.ListMetadataJobsRequest], - Awaitable[~.ListMetadataJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
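-        # A hypothetical flow over the metadata-job callables in this section
-        # (request fields and resource names are illustrative placeholders,
-        # not verified against the protos):
-        #
-        #     op = await transport.create_metadata_job(
-        #         catalog.CreateMetadataJobRequest(
-        #             parent="projects/my-proj/locations/us-central1",
-        #             metadata_job=catalog.MetadataJob()))
-        #     job = await transport.get_metadata_job(
-        #         catalog.GetMetadataJobRequest(
-        #             name="projects/my-proj/locations/us-central1/metadataJobs/job-1"))
-        #     # Canceling an in-progress import may leave partially applied
-        #     # changes (see cancel_metadata_job below).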
- if 'list_metadata_jobs' not in self._stubs: - self._stubs['list_metadata_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs', - request_serializer=catalog.ListMetadataJobsRequest.serialize, - response_deserializer=catalog.ListMetadataJobsResponse.deserialize, - ) - return self._stubs['list_metadata_jobs'] - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel metadata job method over gRPC. - - Cancels a metadata job. - - If you cancel a metadata import job that is in progress, - the changes in the job might be partially applied. We - recommend that you reset the state of the entry groups - in your project by running another metadata job that - reverts the changes from the canceled job. - - Returns: - Callable[[~.CancelMetadataJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_metadata_job' not in self._stubs: - self._stubs['cancel_metadata_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob', - request_serializer=catalog.CancelMetadataJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_metadata_job'] - - @property - def create_entry_link(self) -> Callable[ - [catalog.CreateEntryLinkRequest], - Awaitable[catalog.EntryLink]]: - r"""Return a callable for the create entry link method over gRPC. - - Creates an Entry Link. - - Returns: - Callable[[~.CreateEntryLinkRequest], - Awaitable[~.EntryLink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_link' not in self._stubs: - self._stubs['create_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/CreateEntryLink', - request_serializer=catalog.CreateEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['create_entry_link'] - - @property - def delete_entry_link(self) -> Callable[ - [catalog.DeleteEntryLinkRequest], - Awaitable[catalog.EntryLink]]: - r"""Return a callable for the delete entry link method over gRPC. - - Deletes an Entry Link. - - Returns: - Callable[[~.DeleteEntryLinkRequest], - Awaitable[~.EntryLink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_entry_link' not in self._stubs: - self._stubs['delete_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink', - request_serializer=catalog.DeleteEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['delete_entry_link'] - - @property - def get_entry_link(self) -> Callable[ - [catalog.GetEntryLinkRequest], - Awaitable[catalog.EntryLink]]: - r"""Return a callable for the get entry link method over gRPC. - - Gets an Entry Link. - - Returns: - Callable[[~.GetEntryLinkRequest], - Awaitable[~.EntryLink]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_link' not in self._stubs: - self._stubs['get_entry_link'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CatalogService/GetEntryLink', - request_serializer=catalog.GetEntryLinkRequest.serialize, - response_deserializer=catalog.EntryLink.deserialize, - ) - return self._stubs['get_entry_link'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_entry_type: self._wrap_method( - self.create_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_type: self._wrap_method( - self.update_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_type: self._wrap_method( - self.delete_entry_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_types: self._wrap_method( - self.list_entry_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_type: self._wrap_method( - self.get_entry_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_aspect_type: self._wrap_method( - self.create_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.update_aspect_type: self._wrap_method( - self.update_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_aspect_type: self._wrap_method( - self.delete_aspect_type, - default_timeout=60.0, - client_info=client_info, - ), - self.list_aspect_types: self._wrap_method( - self.list_aspect_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_aspect_type: self._wrap_method( - self.get_aspect_type, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - 
client_info=client_info, - ), - self.create_entry_group: self._wrap_method( - self.create_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry_group: self._wrap_method( - self.update_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry_group: self._wrap_method( - self.delete_entry_group, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entry_groups: self._wrap_method( - self.list_entry_groups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_entry_group: self._wrap_method( - self.get_entry_group, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_entry: self._wrap_method( - self.create_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entry: self._wrap_method( - self.update_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entry: self._wrap_method( - self.delete_entry, - default_timeout=60.0, - client_info=client_info, - ), - self.list_entries: self._wrap_method( - self.list_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_entry: self._wrap_method( - self.get_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.lookup_entry: self._wrap_method( - self.lookup_entry, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.search_entries: self._wrap_method( - self.search_entries, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_metadata_job: self._wrap_method( - self.create_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.get_metadata_job: self._wrap_method( - self.get_metadata_job, - default_timeout=None, - client_info=client_info, - ), - self.list_metadata_jobs: self._wrap_method( - self.list_metadata_jobs, - default_timeout=None, - client_info=client_info, - ), - self.cancel_metadata_job: self._wrap_method( - self.cancel_metadata_job, - default_timeout=None, - 
client_info=client_info, - ), - self.create_entry_link: self._wrap_method( - self.create_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry_link: self._wrap_method( - self.delete_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.get_entry_link: self._wrap_method( - self.get_entry_link, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
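-        # The retry policies configured above back off exponentially
-        # (initial 1.0s, x1.3 per attempt, capped at 10.0s) and retry only on
-        # ResourceExhausted/ServiceUnavailable until the deadline expires.
-        # The stubs from here on target the google.longrunning.Operations and
-        # google.cloud.location.Locations mixin services rather than
-        # CatalogService itself, as their RPC paths show.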
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'CatalogServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py deleted file mode 100644 index 2cf3dbe13573..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py +++ /dev/null @@ -1,6010 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import catalog -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseCatalogServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class CatalogServiceRestInterceptor: - """Interceptor for CatalogService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CatalogServiceRestTransport. - - .. 
code-block:: python - class MyCustomCatalogServiceInterceptor(CatalogServiceRestInterceptor): - def pre_cancel_metadata_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_create_aspect_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_aspect_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_entry(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_entry(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_entry_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_entry_group(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_entry_link(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_entry_link(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_entry_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_entry_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_metadata_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_metadata_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_aspect_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_aspect_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_entry(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_entry(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_entry_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_entry_group(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_entry_link(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_entry_link(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_entry_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_entry_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_aspect_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_aspect_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_entry(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_entry(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_entry_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_entry_group(self, response): - logging.log(f"Received 
response: {response}") - return response - - def pre_get_entry_link(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_entry_link(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_entry_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_entry_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_metadata_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_metadata_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_aspect_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_aspect_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_entries(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_entry_groups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_entry_groups(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_entry_types(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_entry_types(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_metadata_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_metadata_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_lookup_entry(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_lookup_entry(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_entries(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_aspect_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_aspect_type(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_entry(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_entry(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_entry_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_entry_group(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_entry_type(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_entry_type(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CatalogServiceRestTransport(interceptor=MyCustomCatalogServiceInterceptor()) - client = CatalogServiceClient(transport=transport) - - - """ - 
def pre_cancel_metadata_job(self, request: catalog.CancelMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CancelMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_metadata_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def pre_create_aspect_type(self, request: catalog.CreateAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_aspect_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_aspect_type - - DEPRECATED. Please use the `post_create_aspect_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_aspect_type` interceptor runs - before the `post_create_aspect_type_with_metadata` interceptor. - """ - return response - - def post_create_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_aspect_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_create_aspect_type_with_metadata` - interceptor in new development instead of the `post_create_aspect_type` interceptor. - When both interceptors are used, this `post_create_aspect_type_with_metadata` interceptor runs after the - `post_create_aspect_type` interceptor. The (possibly modified) response returned by - `post_create_aspect_type` will be passed to - `post_create_aspect_type_with_metadata`. - """ - return response, metadata - - def pre_create_entry(self, request: catalog.CreateEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_entry - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_entry(self, response: catalog.Entry) -> catalog.Entry: - """Post-rpc interceptor for create_entry - - DEPRECATED. Please use the `post_create_entry_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_entry` interceptor runs - before the `post_create_entry_with_metadata` interceptor. - """ - return response - - def post_create_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_entry - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. 
- - We recommend only using this `post_create_entry_with_metadata` - interceptor in new development instead of the `post_create_entry` interceptor. - When both interceptors are used, this `post_create_entry_with_metadata` interceptor runs after the - `post_create_entry` interceptor. The (possibly modified) response returned by - `post_create_entry` will be passed to - `post_create_entry_with_metadata`. - """ - return response, metadata - - def pre_create_entry_group(self, request: catalog.CreateEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_entry_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_entry_group - - DEPRECATED. Please use the `post_create_entry_group_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_entry_group` interceptor runs - before the `post_create_entry_group_with_metadata` interceptor. - """ - return response - - def post_create_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_entry_group - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_create_entry_group_with_metadata` - interceptor in new development instead of the `post_create_entry_group` interceptor. - When both interceptors are used, this `post_create_entry_group_with_metadata` interceptor runs after the - `post_create_entry_group` interceptor. The (possibly modified) response returned by - `post_create_entry_group` will be passed to - `post_create_entry_group_with_metadata`. - """ - return response, metadata - - def pre_create_entry_link(self, request: catalog.CreateEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_entry_link - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: - """Post-rpc interceptor for create_entry_link - - DEPRECATED. Please use the `post_create_entry_link_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_entry_link` interceptor runs - before the `post_create_entry_link_with_metadata` interceptor. 
- """ - return response - - def post_create_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_entry_link - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_create_entry_link_with_metadata` - interceptor in new development instead of the `post_create_entry_link` interceptor. - When both interceptors are used, this `post_create_entry_link_with_metadata` interceptor runs after the - `post_create_entry_link` interceptor. The (possibly modified) response returned by - `post_create_entry_link` will be passed to - `post_create_entry_link_with_metadata`. - """ - return response, metadata - - def pre_create_entry_type(self, request: catalog.CreateEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_entry_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_entry_type - - DEPRECATED. Please use the `post_create_entry_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_entry_type` interceptor runs - before the `post_create_entry_type_with_metadata` interceptor. - """ - return response - - def post_create_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_entry_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_create_entry_type_with_metadata` - interceptor in new development instead of the `post_create_entry_type` interceptor. - When both interceptors are used, this `post_create_entry_type_with_metadata` interceptor runs after the - `post_create_entry_type` interceptor. The (possibly modified) response returned by - `post_create_entry_type` will be passed to - `post_create_entry_type_with_metadata`. - """ - return response, metadata - - def pre_create_metadata_job(self, request: catalog.CreateMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.CreateMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_metadata_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_create_metadata_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_metadata_job - - DEPRECATED. Please use the `post_create_metadata_job_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_create_metadata_job` interceptor runs - before the `post_create_metadata_job_with_metadata` interceptor. - """ - return response - - def post_create_metadata_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_metadata_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_create_metadata_job_with_metadata` - interceptor in new development instead of the `post_create_metadata_job` interceptor. - When both interceptors are used, this `post_create_metadata_job_with_metadata` interceptor runs after the - `post_create_metadata_job` interceptor. The (possibly modified) response returned by - `post_create_metadata_job` will be passed to - `post_create_metadata_job_with_metadata`. - """ - return response, metadata - - def pre_delete_aspect_type(self, request: catalog.DeleteAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_aspect_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_delete_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_aspect_type - - DEPRECATED. Please use the `post_delete_aspect_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_delete_aspect_type` interceptor runs - before the `post_delete_aspect_type_with_metadata` interceptor. - """ - return response - - def post_delete_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_aspect_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_delete_aspect_type_with_metadata` - interceptor in new development instead of the `post_delete_aspect_type` interceptor. - When both interceptors are used, this `post_delete_aspect_type_with_metadata` interceptor runs after the - `post_delete_aspect_type` interceptor. The (possibly modified) response returned by - `post_delete_aspect_type` will be passed to - `post_delete_aspect_type_with_metadata`. - """ - return response, metadata - - def pre_delete_entry(self, request: catalog.DeleteEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_entry - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. 
- """ - return request, metadata - - def post_delete_entry(self, response: catalog.Entry) -> catalog.Entry: - """Post-rpc interceptor for delete_entry - - DEPRECATED. Please use the `post_delete_entry_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_delete_entry` interceptor runs - before the `post_delete_entry_with_metadata` interceptor. - """ - return response - - def post_delete_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_entry - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_delete_entry_with_metadata` - interceptor in new development instead of the `post_delete_entry` interceptor. - When both interceptors are used, this `post_delete_entry_with_metadata` interceptor runs after the - `post_delete_entry` interceptor. The (possibly modified) response returned by - `post_delete_entry` will be passed to - `post_delete_entry_with_metadata`. - """ - return response, metadata - - def pre_delete_entry_group(self, request: catalog.DeleteEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_entry_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_delete_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_entry_group - - DEPRECATED. Please use the `post_delete_entry_group_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_delete_entry_group` interceptor runs - before the `post_delete_entry_group_with_metadata` interceptor. - """ - return response - - def post_delete_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_entry_group - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_delete_entry_group_with_metadata` - interceptor in new development instead of the `post_delete_entry_group` interceptor. - When both interceptors are used, this `post_delete_entry_group_with_metadata` interceptor runs after the - `post_delete_entry_group` interceptor. The (possibly modified) response returned by - `post_delete_entry_group` will be passed to - `post_delete_entry_group_with_metadata`. 
- """ - return response, metadata - - def pre_delete_entry_link(self, request: catalog.DeleteEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_entry_link - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_delete_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: - """Post-rpc interceptor for delete_entry_link - - DEPRECATED. Please use the `post_delete_entry_link_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_delete_entry_link` interceptor runs - before the `post_delete_entry_link_with_metadata` interceptor. - """ - return response - - def post_delete_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_entry_link - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_delete_entry_link_with_metadata` - interceptor in new development instead of the `post_delete_entry_link` interceptor. - When both interceptors are used, this `post_delete_entry_link_with_metadata` interceptor runs after the - `post_delete_entry_link` interceptor. The (possibly modified) response returned by - `post_delete_entry_link` will be passed to - `post_delete_entry_link_with_metadata`. - """ - return response, metadata - - def pre_delete_entry_type(self, request: catalog.DeleteEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.DeleteEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_entry_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_delete_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_entry_type - - DEPRECATED. Please use the `post_delete_entry_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_delete_entry_type` interceptor runs - before the `post_delete_entry_type_with_metadata` interceptor. - """ - return response - - def post_delete_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_entry_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_delete_entry_type_with_metadata` - interceptor in new development instead of the `post_delete_entry_type` interceptor. - When both interceptors are used, this `post_delete_entry_type_with_metadata` interceptor runs after the - `post_delete_entry_type` interceptor. 
The (possibly modified) response returned by - `post_delete_entry_type` will be passed to - `post_delete_entry_type_with_metadata`. - """ - return response, metadata - - def pre_get_aspect_type(self, request: catalog.GetAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_aspect_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_aspect_type(self, response: catalog.AspectType) -> catalog.AspectType: - """Post-rpc interceptor for get_aspect_type - - DEPRECATED. Please use the `post_get_aspect_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_aspect_type` interceptor runs - before the `post_get_aspect_type_with_metadata` interceptor. - """ - return response - - def post_get_aspect_type_with_metadata(self, response: catalog.AspectType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.AspectType, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_aspect_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_aspect_type_with_metadata` - interceptor in new development instead of the `post_get_aspect_type` interceptor. - When both interceptors are used, this `post_get_aspect_type_with_metadata` interceptor runs after the - `post_get_aspect_type` interceptor. The (possibly modified) response returned by - `post_get_aspect_type` will be passed to - `post_get_aspect_type_with_metadata`. - """ - return response, metadata - - def pre_get_entry(self, request: catalog.GetEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_entry - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_entry(self, response: catalog.Entry) -> catalog.Entry: - """Post-rpc interceptor for get_entry - - DEPRECATED. Please use the `post_get_entry_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_entry` interceptor runs - before the `post_get_entry_with_metadata` interceptor. - """ - return response - - def post_get_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_entry - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_entry_with_metadata` - interceptor in new development instead of the `post_get_entry` interceptor. - When both interceptors are used, this `post_get_entry_with_metadata` interceptor runs after the - `post_get_entry` interceptor. 
The (possibly modified) response returned by - `post_get_entry` will be passed to - `post_get_entry_with_metadata`. - """ - return response, metadata - - def pre_get_entry_group(self, request: catalog.GetEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_entry_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_entry_group(self, response: catalog.EntryGroup) -> catalog.EntryGroup: - """Post-rpc interceptor for get_entry_group - - DEPRECATED. Please use the `post_get_entry_group_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_entry_group` interceptor runs - before the `post_get_entry_group_with_metadata` interceptor. - """ - return response - - def post_get_entry_group_with_metadata(self, response: catalog.EntryGroup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryGroup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_entry_group - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_entry_group_with_metadata` - interceptor in new development instead of the `post_get_entry_group` interceptor. - When both interceptors are used, this `post_get_entry_group_with_metadata` interceptor runs after the - `post_get_entry_group` interceptor. The (possibly modified) response returned by - `post_get_entry_group` will be passed to - `post_get_entry_group_with_metadata`. - """ - return response, metadata - - def pre_get_entry_link(self, request: catalog.GetEntryLinkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_entry_link - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: - """Post-rpc interceptor for get_entry_link - - DEPRECATED. Please use the `post_get_entry_link_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_entry_link` interceptor runs - before the `post_get_entry_link_with_metadata` interceptor. - """ - return response - - def post_get_entry_link_with_metadata(self, response: catalog.EntryLink, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_entry_link - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_entry_link_with_metadata` - interceptor in new development instead of the `post_get_entry_link` interceptor. - When both interceptors are used, this `post_get_entry_link_with_metadata` interceptor runs after the - `post_get_entry_link` interceptor. 
The (possibly modified) response returned by - `post_get_entry_link` will be passed to - `post_get_entry_link_with_metadata`. - """ - return response, metadata - - def pre_get_entry_type(self, request: catalog.GetEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_entry_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_entry_type(self, response: catalog.EntryType) -> catalog.EntryType: - """Post-rpc interceptor for get_entry_type - - DEPRECATED. Please use the `post_get_entry_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_entry_type` interceptor runs - before the `post_get_entry_type_with_metadata` interceptor. - """ - return response - - def post_get_entry_type_with_metadata(self, response: catalog.EntryType, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.EntryType, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_entry_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_entry_type_with_metadata` - interceptor in new development instead of the `post_get_entry_type` interceptor. - When both interceptors are used, this `post_get_entry_type_with_metadata` interceptor runs after the - `post_get_entry_type` interceptor. The (possibly modified) response returned by - `post_get_entry_type` will be passed to - `post_get_entry_type_with_metadata`. - """ - return response, metadata - - def pre_get_metadata_job(self, request: catalog.GetMetadataJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.GetMetadataJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_metadata_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_metadata_job(self, response: catalog.MetadataJob) -> catalog.MetadataJob: - """Post-rpc interceptor for get_metadata_job - - DEPRECATED. Please use the `post_get_metadata_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_get_metadata_job` interceptor runs - before the `post_get_metadata_job_with_metadata` interceptor. - """ - return response - - def post_get_metadata_job_with_metadata(self, response: catalog.MetadataJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.MetadataJob, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_metadata_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_get_metadata_job_with_metadata` - interceptor in new development instead of the `post_get_metadata_job` interceptor. 
-        When both interceptors are used, this `post_get_metadata_job_with_metadata` interceptor runs after the
-        `post_get_metadata_job` interceptor. The (possibly modified) response returned by
-        `post_get_metadata_job` will be passed to
-        `post_get_metadata_job_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_list_aspect_types(self, request: catalog.ListAspectTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListAspectTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for list_aspect_types
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_list_aspect_types(self, response: catalog.ListAspectTypesResponse) -> catalog.ListAspectTypesResponse:
-        """Post-rpc interceptor for list_aspect_types
-
-        DEPRECATED. Please use the `post_list_aspect_types_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_list_aspect_types` interceptor runs
-        before the `post_list_aspect_types_with_metadata` interceptor.
-        """
-        return response
-
-    def post_list_aspect_types_with_metadata(self, response: catalog.ListAspectTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListAspectTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for list_aspect_types
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_list_aspect_types_with_metadata`
-        interceptor in new development instead of the `post_list_aspect_types` interceptor.
-        When both interceptors are used, this `post_list_aspect_types_with_metadata` interceptor runs after the
-        `post_list_aspect_types` interceptor. The (possibly modified) response returned by
-        `post_list_aspect_types` will be passed to
-        `post_list_aspect_types_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_list_entries(self, request: catalog.ListEntriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for list_entries
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_list_entries(self, response: catalog.ListEntriesResponse) -> catalog.ListEntriesResponse:
-        """Post-rpc interceptor for list_entries
-
-        DEPRECATED. Please use the `post_list_entries_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_list_entries` interceptor runs
-        before the `post_list_entries_with_metadata` interceptor.
-        """
-        return response
-
-    def post_list_entries_with_metadata(self, response: catalog.ListEntriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for list_entries
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_list_entries_with_metadata`
-        interceptor in new development instead of the `post_list_entries` interceptor.
-        When both interceptors are used, this `post_list_entries_with_metadata` interceptor runs after the
-        `post_list_entries` interceptor. The (possibly modified) response returned by
-        `post_list_entries` will be passed to
-        `post_list_entries_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_list_entry_groups(self, request: catalog.ListEntryGroupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryGroupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for list_entry_groups
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_list_entry_groups(self, response: catalog.ListEntryGroupsResponse) -> catalog.ListEntryGroupsResponse:
-        """Post-rpc interceptor for list_entry_groups
-
-        DEPRECATED. Please use the `post_list_entry_groups_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_list_entry_groups` interceptor runs
-        before the `post_list_entry_groups_with_metadata` interceptor.
-        """
-        return response
-
-    def post_list_entry_groups_with_metadata(self, response: catalog.ListEntryGroupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for list_entry_groups
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_list_entry_groups_with_metadata`
-        interceptor in new development instead of the `post_list_entry_groups` interceptor.
-        When both interceptors are used, this `post_list_entry_groups_with_metadata` interceptor runs after the
-        `post_list_entry_groups` interceptor. The (possibly modified) response returned by
-        `post_list_entry_groups` will be passed to
-        `post_list_entry_groups_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_list_entry_types(self, request: catalog.ListEntryTypesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryTypesRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for list_entry_types
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_list_entry_types(self, response: catalog.ListEntryTypesResponse) -> catalog.ListEntryTypesResponse:
-        """Post-rpc interceptor for list_entry_types
-
-        DEPRECATED. Please use the `post_list_entry_types_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_list_entry_types` interceptor runs
-        before the `post_list_entry_types_with_metadata` interceptor.
- """ - return response - - def post_list_entry_types_with_metadata(self, response: catalog.ListEntryTypesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListEntryTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_entry_types - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_list_entry_types_with_metadata` - interceptor in new development instead of the `post_list_entry_types` interceptor. - When both interceptors are used, this `post_list_entry_types_with_metadata` interceptor runs after the - `post_list_entry_types` interceptor. The (possibly modified) response returned by - `post_list_entry_types` will be passed to - `post_list_entry_types_with_metadata`. - """ - return response, metadata - - def pre_list_metadata_jobs(self, request: catalog.ListMetadataJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListMetadataJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_metadata_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_list_metadata_jobs(self, response: catalog.ListMetadataJobsResponse) -> catalog.ListMetadataJobsResponse: - """Post-rpc interceptor for list_metadata_jobs - - DEPRECATED. Please use the `post_list_metadata_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_list_metadata_jobs` interceptor runs - before the `post_list_metadata_jobs_with_metadata` interceptor. - """ - return response - - def post_list_metadata_jobs_with_metadata(self, response: catalog.ListMetadataJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.ListMetadataJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_metadata_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_list_metadata_jobs_with_metadata` - interceptor in new development instead of the `post_list_metadata_jobs` interceptor. - When both interceptors are used, this `post_list_metadata_jobs_with_metadata` interceptor runs after the - `post_list_metadata_jobs` interceptor. The (possibly modified) response returned by - `post_list_metadata_jobs` will be passed to - `post_list_metadata_jobs_with_metadata`. - """ - return response, metadata - - def pre_lookup_entry(self, request: catalog.LookupEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.LookupEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for lookup_entry - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_lookup_entry(self, response: catalog.Entry) -> catalog.Entry: - """Post-rpc interceptor for lookup_entry - - DEPRECATED. Please use the `post_lookup_entry_with_metadata` - interceptor instead. 
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_lookup_entry` interceptor runs
-        before the `post_lookup_entry_with_metadata` interceptor.
-        """
-        return response
-
-    def post_lookup_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for lookup_entry
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_lookup_entry_with_metadata`
-        interceptor in new development instead of the `post_lookup_entry` interceptor.
-        When both interceptors are used, this `post_lookup_entry_with_metadata` interceptor runs after the
-        `post_lookup_entry` interceptor. The (possibly modified) response returned by
-        `post_lookup_entry` will be passed to
-        `post_lookup_entry_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_search_entries(self, request: catalog.SearchEntriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.SearchEntriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for search_entries
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_search_entries(self, response: catalog.SearchEntriesResponse) -> catalog.SearchEntriesResponse:
-        """Post-rpc interceptor for search_entries
-
-        DEPRECATED. Please use the `post_search_entries_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_search_entries` interceptor runs
-        before the `post_search_entries_with_metadata` interceptor.
-        """
-        return response
-
-    def post_search_entries_with_metadata(self, response: catalog.SearchEntriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.SearchEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for search_entries
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_search_entries_with_metadata`
-        interceptor in new development instead of the `post_search_entries` interceptor.
-        When both interceptors are used, this `post_search_entries_with_metadata` interceptor runs after the
-        `post_search_entries` interceptor. The (possibly modified) response returned by
-        `post_search_entries` will be passed to
-        `post_search_entries_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_update_aspect_type(self, request: catalog.UpdateAspectTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateAspectTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for update_aspect_type
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_update_aspect_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for update_aspect_type
-
-        DEPRECATED. Please use the `post_update_aspect_type_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_update_aspect_type` interceptor runs
-        before the `post_update_aspect_type_with_metadata` interceptor.
-        """
-        return response
-
-    def post_update_aspect_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for update_aspect_type
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_update_aspect_type_with_metadata`
-        interceptor in new development instead of the `post_update_aspect_type` interceptor.
-        When both interceptors are used, this `post_update_aspect_type_with_metadata` interceptor runs after the
-        `post_update_aspect_type` interceptor. The (possibly modified) response returned by
-        `post_update_aspect_type` will be passed to
-        `post_update_aspect_type_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_update_entry(self, request: catalog.UpdateEntryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for update_entry
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
-        """
-        return request, metadata
-
-    def post_update_entry(self, response: catalog.Entry) -> catalog.Entry:
-        """Post-rpc interceptor for update_entry
-
-        DEPRECATED. Please use the `post_update_entry_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the CatalogService server but before
-        it is returned to user code. This `post_update_entry` interceptor runs
-        before the `post_update_entry_with_metadata` interceptor.
-        """
-        return response
-
-    def post_update_entry_with_metadata(self, response: catalog.Entry, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.Entry, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for update_entry
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the CatalogService server but before it is returned to user code.
-
-        We recommend only using this `post_update_entry_with_metadata`
-        interceptor in new development instead of the `post_update_entry` interceptor.
-        When both interceptors are used, this `post_update_entry_with_metadata` interceptor runs after the
-        `post_update_entry` interceptor. The (possibly modified) response returned by
-        `post_update_entry` will be passed to
-        `post_update_entry_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_update_entry_group(self, request: catalog.UpdateEntryGroupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for update_entry_group
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the CatalogService server.
- """ - return request, metadata - - def post_update_entry_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_entry_group - - DEPRECATED. Please use the `post_update_entry_group_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_update_entry_group` interceptor runs - before the `post_update_entry_group_with_metadata` interceptor. - """ - return response - - def post_update_entry_group_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_entry_group - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_update_entry_group_with_metadata` - interceptor in new development instead of the `post_update_entry_group` interceptor. - When both interceptors are used, this `post_update_entry_group_with_metadata` interceptor runs after the - `post_update_entry_group` interceptor. The (possibly modified) response returned by - `post_update_entry_group` will be passed to - `post_update_entry_group_with_metadata`. - """ - return response, metadata - - def pre_update_entry_type(self, request: catalog.UpdateEntryTypeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[catalog.UpdateEntryTypeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_entry_type - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_update_entry_type(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_entry_type - - DEPRECATED. Please use the `post_update_entry_type_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. This `post_update_entry_type` interceptor runs - before the `post_update_entry_type_with_metadata` interceptor. - """ - return response - - def post_update_entry_type_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_entry_type - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CatalogService server but before it is returned to user code. - - We recommend only using this `post_update_entry_type_with_metadata` - interceptor in new development instead of the `post_update_entry_type` interceptor. - When both interceptors are used, this `post_update_entry_type_with_metadata` interceptor runs after the - `post_update_entry_type` interceptor. The (possibly modified) response returned by - `post_update_entry_type` will be passed to - `post_update_entry_type_with_metadata`. 
- """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CatalogService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the CatalogService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CatalogServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CatalogServiceRestInterceptor - - -class CatalogServiceRestTransport(_BaseCatalogServiceRestTransport): - """REST backend synchronous transport for CatalogService. - - The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. - They collectively let data administrators organize, manage, - secure, and catalog data located across cloud projects in their - organization in a variety of storage systems, including Cloud - Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CatalogServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            url_scheme=url_scheme,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or CatalogServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            http_options: Dict[str, List[Dict[str, str]]] = {
-                'google.longrunning.Operations.CancelOperation': [
-                    {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
-                        'body': '*',
-                    },
-                    {
-                        'method': 'post',
-                        'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel',
-                        'body': '*',
-                    },
-                ],
-                'google.longrunning.Operations.DeleteOperation': [
-                    {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-                    },
-                    {
-                        'method': 'delete',
-                        'uri': '/v1/{name=organizations/*/locations/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.GetOperation': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-                    },
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=organizations/*/locations/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.ListOperations': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*}/operations',
-                    },
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=organizations/*/locations/*}/operations',
-                    },
-                ],
-            }
-
-            rest_transport = operations_v1.OperationsRestTransport(
-                    host=self._host,
-                    # use the credentials which are saved
-                    credentials=self._credentials,
-                    scopes=self._scopes,
-                    http_options=http_options,
-                    path_prefix="v1")
-
-            self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
-
-            # Return the client from cache.
-            return self._operations_client
-
-    class _CancelMetadataJob(_BaseCatalogServiceRestTransport._BaseCancelMetadataJob, CatalogServiceRestStub):
-        def __hash__(self):
-            return hash("CatalogServiceRestTransport.CancelMetadataJob")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: catalog.CancelMetadataJobRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ):
-            r"""Call the cancel metadata job method over HTTP.
-
-            Args:
-                request (~.catalog.CancelMetadataJobRequest):
-                    The request object. Cancel metadata job request.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-            """
-
-            http_options = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_http_options()
-
-            request, metadata = self._interceptor.pre_cancel_metadata_job(request, metadata)
-            transcoded_request = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_transcoded_request(http_options, request)
-
-            body = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CancelMetadataJob",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.CatalogService",
-                        "rpcName": "CancelMetadataJob",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = CatalogServiceRestTransport._CancelMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-    class _CreateAspectType(_BaseCatalogServiceRestTransport._BaseCreateAspectType, CatalogServiceRestStub):
-        def __hash__(self):
-            return hash("CatalogServiceRestTransport.CreateAspectType")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: catalog.CreateAspectTypeRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the create aspect type method over HTTP.
-
-            Args:
-                request (~.catalog.CreateAspectTypeRequest):
-                    The request object. Create AspectType Request.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                long-running operation that is the
-                result of a network API call.
-
-            """
-
-            http_options = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_http_options()
-
-            request, metadata = self._interceptor.pre_create_aspect_type(request, metadata)
-            transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_transcoded_request(http_options, request)
-
-            body = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseCatalogServiceRestTransport._BaseCreateAspectType._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateAspectType",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.CatalogService",
-                        "rpcName": "CreateAspectType",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = CatalogServiceRestTransport._CreateAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = operations_pb2.Operation()
-            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_create_aspect_type(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_create_aspect_type_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.CatalogService",
-                        "rpcName": "CreateAspectType",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _CreateEntry(_BaseCatalogServiceRestTransport._BaseCreateEntry, CatalogServiceRestStub):
-        def __hash__(self):
-            return hash("CatalogServiceRestTransport.CreateEntry")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: catalog.CreateEntryRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> catalog.Entry:
-            r"""Call the create entry method over HTTP.
-
-            Args:
-                request (~.catalog.CreateEntryRequest):
-                    The request object. Create Entry request.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.catalog.Entry:
-                    An entry is a representation of a
-                data resource that can be described by
-                various metadata.
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_http_options() - - request, metadata = self._interceptor.pre_create_entry(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCreateEntry._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntry", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CreateEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.Entry() - pb_resp = catalog.Entry.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_entry(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_entry_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.Entry.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntry", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateEntryGroup(_BaseCatalogServiceRestTransport._BaseCreateEntryGroup, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.CreateEntryGroup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.CreateEntryGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
operations_pb2.Operation: - r"""Call the create entry group method over HTTP. - - Args: - request (~.catalog.CreateEntryGroupRequest): - The request object. Create EntryGroup Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_http_options() - - request, metadata = self._interceptor.pre_create_entry_group(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryGroup", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntryGroup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CreateEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = operations_pb2.Operation()
-            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_create_entry_group(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_create_entry_group_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.CatalogService",
-                        "rpcName": "CreateEntryGroup",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _CreateEntryLink(_BaseCatalogServiceRestTransport._BaseCreateEntryLink, CatalogServiceRestStub):
-        def __hash__(self):
-            return hash("CatalogServiceRestTransport.CreateEntryLink")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: catalog.CreateEntryLinkRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> catalog.EntryLink:
-            r"""Call the create entry link method over HTTP.
-
-            Args:
-                request (~.catalog.CreateEntryLinkRequest):
-                    The request object. Request message for CreateEntryLink.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.catalog.EntryLink:
-                    EntryLink represents a link between
-                two Entries.
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_http_options() - - request, metadata = self._interceptor.pre_create_entry_link(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryLink", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntryLink", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CreateEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.EntryLink() - pb_resp = catalog.EntryLink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_entry_link(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_entry_link_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.EntryLink.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntryLink", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateEntryType(_BaseCatalogServiceRestTransport._BaseCreateEntryType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.CreateEntryType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.CreateEntryTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create entry type method over HTTP. - - Args: - request (~.catalog.CreateEntryTypeRequest): - The request object. Create EntryType Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_http_options() - - request, metadata = self._interceptor.pre_create_entry_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntryType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CreateEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
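Past the status check that follows, CreateEntryType (like the other mutating RPCs in this file) hands back a raw `operations_pb2.Operation` rather than the finished resource. A sketch of unpacking the result once a polled copy reports `done`; field names come from the standard `google.longrunning` protos, and the polling loop itself is elided:

    from google.longrunning import operations_pb2
    from google.cloud.dataplex_v1.types import catalog

    def unpack_entry_type(op: operations_pb2.Operation) -> catalog.EntryType:
        # A finished Operation carries either an error status or a
        # google.protobuf.Any holding the result message.
        if not op.done:
            raise RuntimeError("still running; poll GetOperation first")
        if op.HasField("error"):
            raise RuntimeError(f"operation failed: {op.error.message}")
        pb = catalog.EntryType.pb(catalog.EntryType())  # raw protobuf message
        op.response.Unpack(pb)
        return catalog.EntryType.wrap(pb)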
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_entry_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_entry_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateEntryType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateMetadataJob(_BaseCatalogServiceRestTransport._BaseCreateMetadataJob, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.CreateMetadataJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.CreateMetadataJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create metadata job method over HTTP. - - Args: - request (~.catalog.CreateMetadataJobRequest): - The request object. Create metadata job request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_http_options() - - request, metadata = self._interceptor.pre_create_metadata_job(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateMetadataJob", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateMetadataJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CreateMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_metadata_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_metadata_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CreateMetadataJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAspectType(_BaseCatalogServiceRestTransport._BaseDeleteAspectType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteAspectType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.DeleteAspectTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete aspect type method over HTTP. - - Args: - request (~.catalog.DeleteAspectTypeRequest): - The request object. Delete AspectType Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_http_options() - - request, metadata = self._interceptor.pre_delete_aspect_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteAspectType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteAspectType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
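Note the structural difference from the Create stubs above: `_get_response` for DeleteAspectType passes no `data=` argument, because DELETE carries no body. The verb itself is dispatched by name, which a few lines can reproduce; a sketch assuming a `requests.Session` and the lowercase method string produced by transcoding:

    from typing import Optional
    import requests

    def send(session: requests.Session, method: str, url: str,
             headers: dict, params, body: Optional[str] = None,
             timeout: Optional[float] = None) -> requests.Response:
        # getattr(session, 'delete') and friends resolve to the bound verb
        # methods; requests treats data=None as "no body", so bodiless verbs
        # need no special casing.
        return getattr(session, method)(
            url, timeout=timeout, headers=headers, params=params, data=body,
        )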
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_aspect_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_aspect_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteAspectType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEntry(_BaseCatalogServiceRestTransport._BaseDeleteEntry, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteEntry") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.DeleteEntryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.Entry: - r"""Call the delete entry method over HTTP. - - Args: - request (~.catalog.DeleteEntryRequest): - The request object. Delete Entry request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_http_options() - - request, metadata = self._interceptor.pre_delete_entry(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntry._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntry", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.Entry() - pb_resp = catalog.Entry.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_entry(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_entry_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.Entry.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntry", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEntryGroup(_BaseCatalogServiceRestTransport._BaseDeleteEntryGroup, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteEntryGroup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.DeleteEntryGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete entry group method over HTTP. 
- - Args: - request (~.catalog.DeleteEntryGroupRequest): - The request object. Delete EntryGroup Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_http_options() - - request, metadata = self._interceptor.pre_delete_entry_group(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryGroup", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryGroup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
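`rest_helpers.flatten_query_params` (from `google.api_core`) converts the nested dict produced by transcoding into the flat key/value pairs `requests` expects, dotting nested field names; with `strict=True`, leaf values are rendered as strings with protobuf-style lowercase booleans. Illustrative only; the parameter keys here are made up:

    from google.api_core import rest_helpers

    params = {"pageSize": 10, "view": {"includeAspects": True}}
    # Roughly: [('pageSize', '10'), ('view.includeAspects', 'true')]
    print(rest_helpers.flatten_query_params(params, strict=True))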
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_entry_group(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_entry_group_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryGroup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEntryLink(_BaseCatalogServiceRestTransport._BaseDeleteEntryLink, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteEntryLink") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.DeleteEntryLinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.EntryLink: - r"""Call the delete entry link method over HTTP. - - Args: - request (~.catalog.DeleteEntryLinkRequest): - The request object. Request message for DeleteEntryLink. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.EntryLink: - EntryLink represents a link between - two Entries. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_http_options() - - request, metadata = self._interceptor.pre_delete_entry_link(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryLink", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryLink", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.EntryLink() - pb_resp = catalog.EntryLink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_entry_link(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_entry_link_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.EntryLink.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryLink", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEntryType(_BaseCatalogServiceRestTransport._BaseDeleteEntryType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteEntryType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.DeleteEntryTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete entry type method over HTTP. 
- - Args: - request (~.catalog.DeleteEntryTypeRequest): - The request object. Delete EntryType Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_http_options() - - request, metadata = self._interceptor.pre_delete_entry_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
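All of the request/response logging in this hunk is double-gated: the module-level CLIENT_LOGGING_SUPPORTED flag and a DEBUG-level check on the module logger, so it costs nothing unless explicitly enabled. One way to turn it on from application code, relying only on the standard logger hierarchy (the transport's logger name is its module path, so the package logger is an ancestor):

    import logging

    logging.basicConfig()  # attach a handler to the root logger
    # Raising the package logger to DEBUG surfaces the "Sending request" /
    # "Received response" records emitted by the REST transport above.
    logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)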
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_entry_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_entry_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteEntryType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAspectType(_BaseCatalogServiceRestTransport._BaseGetAspectType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetAspectType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetAspectTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.AspectType: - r"""Call the get aspect type method over HTTP. - - Args: - request (~.catalog.GetAspectTypeRequest): - The request object. Get AspectType request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.AspectType: - AspectType is a template for creating - Aspects, and represents the JSON-schema - for a given Entry, for example, BigQuery - Table Schema. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_http_options() - - request, metadata = self._interceptor.pre_get_aspect_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetAspectType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetAspectType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetAspectType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.AspectType() - pb_resp = catalog.AspectType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_aspect_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_aspect_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.AspectType.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetAspectType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEntry(_BaseCatalogServiceRestTransport._BaseGetEntry, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetEntry") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetEntryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.Entry: - r"""Call the get entry method over HTTP. 
- - Args: - request (~.catalog.GetEntryRequest): - The request object. Get Entry request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetEntry._get_http_options() - - request, metadata = self._interceptor.pre_get_entry(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntry._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetEntry._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntry", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
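The deserialization idiom used throughout this file parses into `pb_resp` yet returns `resp`. That works because a proto-plus wrapper shares storage with its underlying protobuf message: `Entry.pb(resp)` gives `json_format.Parse` a mutable view of `resp` itself. Condensed:

    from google.protobuf import json_format
    from google.cloud.dataplex_v1.types import catalog

    def parse_entry(payload: bytes) -> catalog.Entry:
        resp = catalog.Entry()            # empty proto-plus wrapper
        pb_resp = catalog.Entry.pb(resp)  # same message, raw protobuf view
        # Mutating pb_resp mutates resp; unknown fields are tolerated so
        # older clients survive server-side schema additions.
        json_format.Parse(payload, pb_resp, ignore_unknown_fields=True)
        return resp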
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.Entry() - pb_resp = catalog.Entry.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_entry(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_entry_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.Entry.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntry", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEntryGroup(_BaseCatalogServiceRestTransport._BaseGetEntryGroup, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetEntryGroup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetEntryGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.EntryGroup: - r"""Call the get entry group method over HTTP. - - Args: - request (~.catalog.GetEntryGroupRequest): - The request object. Get EntryGroup request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.EntryGroup: - An Entry Group represents a logical - grouping of one or more Entries. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_http_options() - - request, metadata = self._interceptor.pre_get_entry_group(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryGroup", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryGroup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.EntryGroup() - pb_resp = catalog.EntryGroup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_entry_group(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_entry_group_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.EntryGroup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryGroup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEntryLink(_BaseCatalogServiceRestTransport._BaseGetEntryLink, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetEntryLink") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetEntryLinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.EntryLink: - r"""Call the get entry link method over HTTP. 
- - Args: - request (~.catalog.GetEntryLinkRequest): - The request object. Request message for GetEntryLink. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.EntryLink: - EntryLink represents a link between - two Entries. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_http_options() - - request, metadata = self._interceptor.pre_get_entry_link(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryLink", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryLink", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetEntryLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
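As the docstring above says, metadata values are plain strings unless the key ends in `-bin`, in which case gRPC's binary-header convention applies and the value must be `bytes`. For instance (the second header name is invented for illustration):

    from typing import Sequence, Tuple, Union

    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (
        ("x-goog-request-params", "name=projects/p/locations/l"),  # text
        ("x-example-trace-bin", b"\x00\x01\x02"),  # '-bin' suffix => bytes
    )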
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.EntryLink() - pb_resp = catalog.EntryLink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_entry_link(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_entry_link_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.EntryLink.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryLink", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEntryType(_BaseCatalogServiceRestTransport._BaseGetEntryType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetEntryType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetEntryTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.EntryType: - r"""Call the get entry type method over HTTP. - - Args: - request (~.catalog.GetEntryTypeRequest): - The request object. Get EntryType request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.EntryType: - Entry Type is a template for creating - Entries. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_http_options() - - request, metadata = self._interceptor.pre_get_entry_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetEntryType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.EntryType() - pb_resp = catalog.EntryType.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_entry_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_entry_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.EntryType.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetEntryType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetMetadataJob(_BaseCatalogServiceRestTransport._BaseGetMetadataJob, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetMetadataJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.GetMetadataJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.MetadataJob: - r"""Call the get metadata job method over HTTP. 
- - Args: - request (~.catalog.GetMetadataJobRequest): - The request object. Get metadata job request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.MetadataJob: - A metadata job resource. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_http_options() - - request, metadata = self._interceptor.pre_get_metadata_job(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetMetadataJob", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetMetadataJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetMetadataJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
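Each stub brackets its work with `self._interceptor.pre_*` / `post_*` calls, which is the intended customization point. A sketch of a caller-supplied interceptor, subclassing the generated `CatalogServiceRestInterceptor`; the hook names mirror the calls above and the signatures follow the generated base class:

    from google.cloud.dataplex_v1.services.catalog_service.transports.rest import (
        CatalogServiceRestInterceptor,
    )

    class AuditingInterceptor(CatalogServiceRestInterceptor):
        def pre_get_metadata_job(self, request, metadata):
            # Inspect or rewrite the request/metadata before transcoding.
            print(f"fetching metadata job: {request.name}")
            return request, metadata

        def post_get_metadata_job(self, response):
            # Runs after deserialization, before the caller sees the result.
            return response

An instance is handed to the transport constructor (`CatalogServiceRestTransport(..., interceptor=AuditingInterceptor())`), which is where the `self._interceptor` used throughout this file comes from.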
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.MetadataJob() - pb_resp = catalog.MetadataJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_metadata_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_metadata_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.MetadataJob.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetMetadataJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListAspectTypes(_BaseCatalogServiceRestTransport._BaseListAspectTypes, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListAspectTypes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.ListAspectTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.ListAspectTypesResponse: - r"""Call the list aspect types method over HTTP. - - Args: - request (~.catalog.ListAspectTypesRequest): - The request object. List AspectTypes request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.ListAspectTypesResponse: - List AspectTypes response. 
- """ - - http_options = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_http_options() - - request, metadata = self._interceptor.pre_list_aspect_types(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListAspectTypes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListAspectTypes", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListAspectTypes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListAspectTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.ListAspectTypesResponse() - pb_resp = catalog.ListAspectTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_aspect_types(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_aspect_types_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.ListAspectTypesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListAspectTypes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEntries(_BaseCatalogServiceRestTransport._BaseListEntries, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListEntries") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.ListEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.ListEntriesResponse: - r"""Call the list entries 
method over HTTP. - - Args: - request (~.catalog.ListEntriesRequest): - The request object. List Entries request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.ListEntriesResponse: - List Entries response. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseListEntries._get_http_options() - - request, metadata = self._interceptor.pre_list_entries(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntries._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListEntries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntries", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntries", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListEntries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
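As a usage sketch: this handler returns a single page, and the public client wraps it in a pager that re-invokes the transport with each next_page_token. The parent path below is hypothetical, and application-default credentials are assumed.

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

request = dataplex_v1.ListEntriesRequest(
    parent="projects/my-project/locations/us-central1/entryGroups/my-group",  # hypothetical
    page_size=100,
)
# Iterating the pager transparently issues follow-up ListEntries calls
# through this handler until the listing is exhausted.
for entry in client.list_entries(request=request):
    print(entry.name)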
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.ListEntriesResponse() - pb_resp = catalog.ListEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_entries(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_entries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.ListEntriesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entries", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntries", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEntryGroups(_BaseCatalogServiceRestTransport._BaseListEntryGroups, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListEntryGroups") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.ListEntryGroupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.ListEntryGroupsResponse: - r"""Call the list entry groups method over HTTP. - - Args: - request (~.catalog.ListEntryGroupsRequest): - The request object. List entryGroups request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.ListEntryGroupsResponse: - List entry groups response. 
- """ - - http_options = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_http_options() - - request, metadata = self._interceptor.pre_list_entry_groups(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListEntryGroups._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntryGroups", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntryGroups", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListEntryGroups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.ListEntryGroupsResponse() - pb_resp = catalog.ListEntryGroupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_entry_groups(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_entry_groups_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.ListEntryGroupsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntryGroups", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEntryTypes(_BaseCatalogServiceRestTransport._BaseListEntryTypes, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListEntryTypes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.ListEntryTypesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.ListEntryTypesResponse: - r"""Call 
the list entry types method over HTTP. - - Args: - request (~.catalog.ListEntryTypesRequest): - The request object. List EntryTypes request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.ListEntryTypesResponse: - List EntryTypes response. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_http_options() - - request, metadata = self._interceptor.pre_list_entry_types(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListEntryTypes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListEntryTypes", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntryTypes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListEntryTypes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
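The CLIENT_LOGGING_SUPPORTED guard above means the structured request/response records are only emitted when the transport module's logger is enabled for DEBUG. A sketch of surfacing them follows; the logger name is inferred from the package layout, and recent google-api-core releases also expose an opt-in GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable worth checking against your installed version.

import logging

from google.cloud import dataplex_v1

# The transport modules log via logging.getLogger(__name__), so enabling
# DEBUG on the package logger lets their debug records through.
logging.basicConfig(level=logging.INFO)
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)

client = dataplex_v1.CatalogServiceClient(transport="rest")
for entry_type in client.list_entry_types(
    parent="projects/my-project/locations/global"  # hypothetical parent
):
    print(entry_type.name)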
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.ListEntryTypesResponse() - pb_resp = catalog.ListEntryTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_entry_types(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_entry_types_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.ListEntryTypesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListEntryTypes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListMetadataJobs(_BaseCatalogServiceRestTransport._BaseListMetadataJobs, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListMetadataJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.ListMetadataJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.ListMetadataJobsResponse: - r"""Call the list metadata jobs method over HTTP. - - Args: - request (~.catalog.ListMetadataJobsRequest): - The request object. List metadata jobs request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.ListMetadataJobsResponse: - List metadata jobs response. 
- """ - - http_options = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_metadata_jobs(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListMetadataJobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListMetadataJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListMetadataJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.ListMetadataJobsResponse() - pb_resp = catalog.ListMetadataJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_metadata_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_metadata_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.ListMetadataJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListMetadataJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _LookupEntry(_BaseCatalogServiceRestTransport._BaseLookupEntry, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.LookupEntry") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.LookupEntryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.Entry: - r"""Call the lookup entry 
method over HTTP. - - Args: - request (~.catalog.LookupEntryRequest): - The request object. Lookup Entry request using - permissions in the source system. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_http_options() - - request, metadata = self._interceptor.pre_lookup_entry(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseLookupEntry._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.LookupEntry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "LookupEntry", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._LookupEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
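Usage sketch for this handler: LookupEntry resolves an entry using the caller's permissions in the source system, with the target entry passed as a query parameter rather than in the URL path. The resource names below are hypothetical.

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")

request = dataplex_v1.LookupEntryRequest(
    # 'name' is the project/location scope of the lookup; 'entry' is the
    # fully qualified entry to resolve (both hypothetical values).
    name="projects/my-project/locations/us-central1",
    entry="projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry",
    view=dataplex_v1.EntryView.FULL,
)
entry = client.lookup_entry(request=request)
print(entry.entry_type)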
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.Entry() - pb_resp = catalog.Entry.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_lookup_entry(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_lookup_entry_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.Entry.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "LookupEntry", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchEntries(_BaseCatalogServiceRestTransport._BaseSearchEntries, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.SearchEntries") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: catalog.SearchEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.SearchEntriesResponse: - r"""Call the search entries method over HTTP. - - Args: - request (~.catalog.SearchEntriesRequest): - The request object. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.catalog.SearchEntriesResponse: - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_http_options() - - request, metadata = self._interceptor.pre_search_entries(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseSearchEntries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.SearchEntries", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "SearchEntries", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._SearchEntries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.SearchEntriesResponse() - pb_resp = catalog.SearchEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_entries(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_entries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.SearchEntriesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.search_entries", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "SearchEntries", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateAspectType(_BaseCatalogServiceRestTransport._BaseUpdateAspectType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.UpdateAspectType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.UpdateAspectTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
operations_pb2.Operation: - r"""Call the update aspect type method over HTTP. - - Args: - request (~.catalog.UpdateAspectTypeRequest): - The request object. Update AspectType Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_http_options() - - request, metadata = self._interceptor.pre_update_aspect_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateAspectType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateAspectType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._UpdateAspectType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
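Because this handler returns a raw operations_pb2.Operation, the public client wraps it in google.api_core.operation.Operation so callers can block on the long-running operation. A sketch with a hypothetical aspect type name:

from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.CatalogServiceClient(transport="rest")

aspect_type = dataplex_v1.AspectType(
    name="projects/my-project/locations/global/aspectTypes/my-aspect",  # hypothetical
    description="Updated description",
)
operation = client.update_aspect_type(
    aspect_type=aspect_type,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
# result() polls the operation surfaced by this transport and resolves
# to the updated AspectType (or raises the operation's error).
updated = operation.result()
print(updated.name)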
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_aspect_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_aspect_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateAspectType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateEntry(_BaseCatalogServiceRestTransport._BaseUpdateEntry, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.UpdateEntry") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.UpdateEntryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> catalog.Entry: - r"""Call the update entry method over HTTP. - - Args: - request (~.catalog.UpdateEntryRequest): - The request object. Update Entry request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.catalog.Entry: - An entry is a representation of a - data resource that can be described by - various metadata. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_http_options() - - request, metadata = self._interceptor.pre_update_entry(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntry._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntry", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._UpdateEntry._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = catalog.Entry() - pb_resp = catalog.Entry.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_entry(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_entry_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = catalog.Entry.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntry", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateEntryGroup(_BaseCatalogServiceRestTransport._BaseUpdateEntryGroup, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.UpdateEntryGroup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.UpdateEntryGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
operations_pb2.Operation: - r"""Call the update entry group method over HTTP. - - Args: - request (~.catalog.UpdateEntryGroupRequest): - The request object. Update EntryGroup Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_http_options() - - request, metadata = self._interceptor.pre_update_entry_group(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntryGroup", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntryGroup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._UpdateEntryGroup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
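The docstrings above note that per-call metadata values are strings unless the key ends in "-bin", in which case bytes are required; this handler folds them directly into the HTTP headers. A sketch passing both kinds (the header names and resource path are hypothetical):

from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.CatalogServiceClient(transport="rest")

entry_group = dataplex_v1.EntryGroup(
    name="projects/my-project/locations/us-central1/entryGroups/my-group",  # hypothetical
    description="Refreshed description",
)
operation = client.update_entry_group(
    entry_group=entry_group,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    metadata=(
        ("x-example-reason", "backfill"),      # plain str value
        ("x-example-trace-bin", b"\x00\x01"),  # '-bin' suffix => bytes value
    ),
)
operation.result()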
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_entry_group(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_entry_group_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntryGroup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateEntryType(_BaseCatalogServiceRestTransport._BaseUpdateEntryType, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.UpdateEntryType") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: catalog.UpdateEntryTypeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update entry type method over HTTP. - - Args: - request (~.catalog.UpdateEntryTypeRequest): - The request object. Update EntryType Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_http_options() - - request, metadata = self._interceptor.pre_update_entry_type(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.UpdateEntryType", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntryType", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._UpdateEntryType._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_entry_type(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_entry_type_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "UpdateEntryType", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def cancel_metadata_job(self) -> Callable[ - [catalog.CancelMetadataJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelMetadataJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_aspect_type(self) -> Callable[ - [catalog.CreateAspectTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateAspectType(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_entry(self) -> Callable[ - [catalog.CreateEntryRequest], - catalog.Entry]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEntry(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_entry_group(self) -> Callable[ - [catalog.CreateEntryGroupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEntryGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_entry_link(self) -> Callable[ - [catalog.CreateEntryLinkRequest], - catalog.EntryLink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEntryLink(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_entry_type(self) -> Callable[ - [catalog.CreateEntryTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEntryType(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_metadata_job(self) -> Callable[ - [catalog.CreateMetadataJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateMetadataJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_aspect_type(self) -> Callable[ - [catalog.DeleteAspectTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAspectType(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_entry(self) -> Callable[ - [catalog.DeleteEntryRequest], - catalog.Entry]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteEntry(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_entry_group(self) -> Callable[ - [catalog.DeleteEntryGroupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteEntryGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_entry_link(self) -> Callable[ - [catalog.DeleteEntryLinkRequest], - catalog.EntryLink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteEntryLink(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_entry_type(self) -> Callable[ - [catalog.DeleteEntryTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteEntryType(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_aspect_type(self) -> Callable[ - [catalog.GetAspectTypeRequest], - catalog.AspectType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAspectType(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_entry(self) -> Callable[ - [catalog.GetEntryRequest], - catalog.Entry]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEntry(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_entry_group(self) -> Callable[ - [catalog.GetEntryGroupRequest], - catalog.EntryGroup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEntryGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_entry_link(self) -> Callable[ - [catalog.GetEntryLinkRequest], - catalog.EntryLink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEntryLink(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_entry_type(self) -> Callable[ - [catalog.GetEntryTypeRequest], - catalog.EntryType]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEntryType(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_metadata_job(self) -> Callable[ - [catalog.GetMetadataJobRequest], - catalog.MetadataJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetMetadataJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_aspect_types(self) -> Callable[ - [catalog.ListAspectTypesRequest], - catalog.ListAspectTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAspectTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_entries(self) -> Callable[ - [catalog.ListEntriesRequest], - catalog.ListEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_entry_groups(self) -> Callable[ - [catalog.ListEntryGroupsRequest], - catalog.ListEntryGroupsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListEntryGroups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_entry_types(self) -> Callable[ - [catalog.ListEntryTypesRequest], - catalog.ListEntryTypesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListEntryTypes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_metadata_jobs(self) -> Callable[ - [catalog.ListMetadataJobsRequest], - catalog.ListMetadataJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListMetadataJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def lookup_entry(self) -> Callable[ - [catalog.LookupEntryRequest], - catalog.Entry]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._LookupEntry(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_entries(self) -> Callable[ - [catalog.SearchEntriesRequest], - catalog.SearchEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_aspect_type(self) -> Callable[ - [catalog.UpdateAspectTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAspectType(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_entry(self) -> Callable[ - [catalog.UpdateEntryRequest], - catalog.Entry]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateEntry(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_entry_group(self) -> Callable[ - [catalog.UpdateEntryGroupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateEntryGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_entry_type(self) -> Callable[ - [catalog.UpdateEntryTypeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateEntryType(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseCatalogServiceRestTransport._BaseGetLocation, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
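These mixin handlers parse into locations_pb2 messages rather than dataplex protos, so the matching client calls take the raw location request types. A sketch (the project path is hypothetical):

from google.cloud import dataplex_v1
from google.cloud.location import locations_pb2

client = dataplex_v1.CatalogServiceClient(transport="rest")

response = client.list_locations(
    request=locations_pb2.ListLocationsRequest(name="projects/my-project")  # hypothetical
)
for location in response.locations:
    detail = client.get_location(
        request=locations_pb2.GetLocationRequest(name=location.name)
    )
    print(detail.location_id)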
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseCatalogServiceRestTransport._BaseListLocations, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
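Every `__call__` above finishes with the same two steps: map a non-2xx status to a typed exception, then parse the JSON body into the response proto. A minimal sketch of that convention, assuming google-api-core and the protobuf runtime are installed:

from google.api_core import exceptions as core_exceptions
from google.cloud.location import locations_pb2
from google.protobuf import json_format

def parse_location_response(response):
    # from_http_response selects the GoogleAPICallError subclass
    # (NotFound, PermissionDenied, ...) matching the status code.
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)
    resp = locations_pb2.Location()
    json_format.Parse(response.content.decode("utf-8"), resp)
    return resp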
- """ - - http_options = _BaseCatalogServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseCatalogServiceRestTransport._BaseCancelOperation, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseCatalogServiceRestTransport._BaseDeleteOperation, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseCatalogServiceRestTransport._BaseGetOperation, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
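Each handler above also routes through paired interceptor hooks: `pre_*` receives the request and metadata before the HTTP call, `post_*` receives the decoded response. A hedged sketch of plugging in a custom interceptor; the class and hook names mirror the generated rest module, but treat the example as illustrative rather than canonical:

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.catalog_service.transports.rest import (
    CatalogServiceRestInterceptor,
    CatalogServiceRestTransport,
)

class AuditInterceptor(CatalogServiceRestInterceptor):
    def pre_get_entry(self, request, metadata):
        # Inspect or amend the request/metadata before it is sent.
        print(f"GetEntry for {request.name}")
        return request, metadata

# Assumes application-default credentials are available in the environment.
client = dataplex_v1.CatalogServiceClient(
    transport=CatalogServiceRestTransport(interceptor=AuditInterceptor()),
)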
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseCatalogServiceRestTransport._BaseListOperations, CatalogServiceRestStub): - def __hash__(self): - return hash("CatalogServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseCatalogServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCatalogServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCatalogServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CatalogServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
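The request/response DEBUG records above are emitted only when client logging is switched on. A minimal sketch, assuming the scoped-logging support in recent google-api-core releases; the environment variable name is an assumption and must be set before the library is imported:

import logging
import os

# Assumption: GOOGLE_SDK_PYTHON_LOGGING_SCOPE is the variable recognized by
# google-api-core's client logging; scope it to this package.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.dataplex_v1"

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)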
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CatalogServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CatalogService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'CatalogServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py deleted file mode 100644 index b48f7ba603d4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py +++ /dev/null @@ -1,1451 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import catalog -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseCatalogServiceRestTransport(CatalogServiceTransport): - """Base REST backend transport for CatalogService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCancelMetadataJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CancelMetadataJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCancelMetadataJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateAspectType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "aspectTypeId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/aspectTypes', - 'body': 'aspect_type', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateAspectTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateAspectType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateEntry: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "entryId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entries', - 'body': 'entry', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateEntryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntry._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateEntryGroup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "entryGroupId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/entryGroups', - 'body': 'entry_group', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateEntryGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryGroup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateEntryLink: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "entryLinkId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks', - 'body': 'entry_link', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateEntryLinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateEntryType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "entryTypeId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/entryTypes', - 'body': 'entry_type', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateEntryTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateEntryType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateMetadataJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - 
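Each `_Base*` class above follows the same recipe: transcode the proto against its `http_options` to split it into URI, body, and query params, then back-fill any required-but-unset field (such as `aspectTypeId`) with its proto default so it always appears on the query string. A minimal sketch of the transcoding step, assuming the google-cloud-dataplex package is installed; the resource names are illustrative:

from google.api_core import path_template
from google.cloud.dataplex_v1.types import catalog

http_options = [{
    'method': 'post',
    'uri': '/v1/{parent=projects/*/locations/*}/aspectTypes',
    'body': 'aspect_type',
}]
request = catalog.CreateAspectTypeRequest(parent="projects/p/locations/l")
transcoded = path_template.transcode(http_options, catalog.CreateAspectTypeRequest.pb(request))

# transcoded['uri'] == '/v1/projects/p/locations/l/aspectTypes'; the fields
# matched by the template and the body field are stripped, and whatever
# remains in transcoded['query_params'] is serialized, back-filled with the
# required defaults, and tagged with $alt=json;enum-encoding=int.
print(transcoded['method'], transcoded['uri'])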
@classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/metadataJobs', - 'body': 'metadata_job', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.CreateMetadataJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseCreateMetadataJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteAspectType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/aspectTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.DeleteAspectTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteAspectType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEntry: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.DeleteEntryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntry._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseDeleteEntryGroup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.DeleteEntryGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryGroup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEntryLink: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.DeleteEntryLinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEntryType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/entryTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.DeleteEntryTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseDeleteEntryType._get_unset_required_fields(query_params)) - - 
query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAspectType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/aspectTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetAspectTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetAspectType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEntry: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetEntryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntry._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEntryGroup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetEntryGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryGroup._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseGetEntryLink: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetEntryLinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryLink._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEntryType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/entryTypes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetEntryTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetEntryType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetMetadataJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/metadataJobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.GetMetadataJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseGetMetadataJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseListAspectTypes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/aspectTypes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.ListAspectTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseListAspectTypes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListEntries: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/entryGroups/*}/entries', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.ListEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseListEntries._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListEntryGroups: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/entryGroups', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.ListEntryGroupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseListEntryGroups._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseListEntryTypes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/entryTypes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.ListEntryTypesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseListEntryTypes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListMetadataJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/metadataJobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.ListMetadataJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseListMetadataJobs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseLookupEntry: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "entry" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}:lookupEntry', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.LookupEntryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseLookupEntry._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseSearchEntries: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "query" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*}:searchEntries', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.SearchEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseSearchEntries._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateAspectType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}', - 'body': 'aspect_type', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.UpdateAspectTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateAspectType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEntry: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}', - 'body': 'entry', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.UpdateEntryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntry._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEntryGroup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}', - 'body': 'entry_group', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.UpdateEntryGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntryGroup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEntryType: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}', - 'body': 'entry_type', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = catalog.UpdateEntryTypeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCatalogServiceRestTransport._BaseUpdateEntryType._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': 
'/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseCatalogServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py deleted file mode 100644 index adda73051cc6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CmekServiceClient -from .async_client import CmekServiceAsyncClient - -__all__ = ( - 'CmekServiceClient', - 'CmekServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py deleted file mode 100644 index 92bf65140225..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/async_client.py +++ /dev/null @@ -1,1216 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
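The deleted `_Base*` transport classes above all assemble query strings the same way: transcode the request against the HTTP options, backfill any required fields the caller left unset, and pin the response encoding via ``$alt``. A minimal, self-contained sketch of that recipe (plain dicts stand in for the transcoded request; the generated code goes through ``json_format.MessageToJson``):

.. code-block:: python

    # Defaults for required fields, e.g. _BaseSearchEntries requires "query".
    REQUIRED_FIELDS_DEFAULT_VALUES = {"query": ""}

    def _get_unset_required_fields(message_dict):
        # Keep only the defaults for fields the caller did not set.
        return {k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict}

    def get_query_params(transcoded_query_params):
        query_params = dict(transcoded_query_params)
        query_params.update(_get_unset_required_fields(query_params))
        query_params["$alt"] = "json;enum-encoding=int"  # integer enum encoding
        return query_params

    print(get_query_params({"pageSize": 10}))
    # {'pageSize': 10, 'query': '', '$alt': 'json;enum-encoding=int'}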
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.cmek_service import pagers -from google.cloud.dataplex_v1.types import cmek -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CmekServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CmekServiceGrpcAsyncIOTransport -from .client import CmekServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class CmekServiceAsyncClient: - """Dataplex Universal Catalog Customer Managed Encryption Keys - (CMEK) Service - """ - - _client: CmekServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
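It is worth seeing once how the endpoint template mentioned in that note expands; a sketch using the template and universe values this client copies from ``CmekServiceClient`` (both defined later in this diff):

.. code-block:: python

    # Values as defined on CmekServiceClient further down in this diff.
    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    def resolve_endpoint(universe_domain: str = _DEFAULT_UNIVERSE) -> str:
        return _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

    print(resolve_endpoint())                # dataplex.googleapis.com
    print(resolve_endpoint("example.goog"))  # dataplex.example.goog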
- DEFAULT_ENDPOINT = CmekServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CmekServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = CmekServiceClient._DEFAULT_UNIVERSE - - encryption_config_path = staticmethod(CmekServiceClient.encryption_config_path) - parse_encryption_config_path = staticmethod(CmekServiceClient.parse_encryption_config_path) - organization_location_path = staticmethod(CmekServiceClient.organization_location_path) - parse_organization_location_path = staticmethod(CmekServiceClient.parse_organization_location_path) - common_billing_account_path = staticmethod(CmekServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CmekServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CmekServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(CmekServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(CmekServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(CmekServiceClient.parse_common_organization_path) - common_project_path = staticmethod(CmekServiceClient.common_project_path) - parse_common_project_path = staticmethod(CmekServiceClient.parse_common_project_path) - common_location_path = staticmethod(CmekServiceClient.common_location_path) - parse_common_location_path = staticmethod(CmekServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CmekServiceAsyncClient: The constructed client. - """ - return CmekServiceClient.from_service_account_info.__func__(CmekServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CmekServiceAsyncClient: The constructed client. - """ - return CmekServiceClient.from_service_account_file.__func__(CmekServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CmekServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CmekServiceTransport: - """Returns the transport used by the client instance. - - Returns: - CmekServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = CmekServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CmekServiceTransport, Callable[..., CmekServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cmek service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,CmekServiceTransport,Callable[..., CmekServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the CmekServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport.
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CmekServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.CmekServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "credentialsType": None, - } - ) - - async def create_encryption_config(self, - request: Optional[Union[cmek.CreateEncryptionConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - encryption_config: Optional[cmek.EncryptionConfig] = None, - encryption_config_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEncryptionConfigRequest( - parent="parent_value", - encryption_config_id="encryption_config_id_value", - ) - - # Make the request - operation = client.create_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest, dict]]): - The request object. Create EncryptionConfig Request - parent (:class:`str`): - Required. The location at which the - EncryptionConfig is to be created. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encryption_config (:class:`google.cloud.dataplex_v1.types.EncryptionConfig`): - Required. The EncryptionConfig to - create. - - This corresponds to the ``encryption_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encryption_config_id (:class:`str`): - Required. The ID of the - [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] - to create. Currently, only a value of "default" is - supported. - - This corresponds to the ``encryption_config_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to - support Customer Managed Encryption Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, encryption_config, encryption_config_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.CreateEncryptionConfigRequest): - request = cmek.CreateEncryptionConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if encryption_config is not None: - request.encryption_config = encryption_config - if encryption_config_id is not None: - request.encryption_config_id = encryption_config_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cmek.EncryptionConfig, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
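The guard at the top of ``create_encryption_config`` above, which every RPC method in this client repeats, reduces to the following sketch (``_check_flattened`` is a hypothetical name for illustration):

.. code-block:: python

    def _check_flattened(request, *flattened_params):
        # Mirrors the has_flattened_params check above, generalized.
        if request is not None and any(p is not None for p in flattened_params):
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

    _check_flattened(None, "projects/p/locations/l")   # ok: flattened args only
    _check_flattened({"parent": "p"}, None, None)      # ok: request object only
    # _check_flattened({"parent": "p"}, "projects/p")  # would raise ValueError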
- return response - - async def update_encryption_config(self, - request: Optional[Union[cmek.UpdateEncryptionConfigRequest, dict]] = None, - *, - encryption_config: Optional[cmek.EncryptionConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEncryptionConfigRequest( - ) - - # Make the request - operation = client.update_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest, dict]]): - The request object. Update EncryptionConfig Request - encryption_config (:class:`google.cloud.dataplex_v1.types.EncryptionConfig`): - Required. The EncryptionConfig to - update. - - This corresponds to the ``encryption_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Mask of fields to update. - The service treats an omitted field mask - as an implied field mask equivalent to - all fields that are populated (have a - non-empty value). - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to - support Customer Managed Encryption Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [encryption_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.UpdateEncryptionConfigRequest): - request = cmek.UpdateEncryptionConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if encryption_config is not None: - request.encryption_config = encryption_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("encryption_config.name", request.encryption_config.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cmek.EncryptionConfig, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_encryption_config(self, - request: Optional[Union[cmek.DeleteEncryptionConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest, dict]]): - The request object. Delete EncryptionConfig Request - name (:class:`str`): - Required. The name of the - EncryptionConfig to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
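The ``encryption_config.name`` routing pair built above travels to the server as the ``x-goog-request-params`` header; a simplified stand-in for the ``google.api_core`` helper, with an illustrative resource name:

.. code-block:: python

    from urllib.parse import quote

    def to_routing_metadata(pairs):
        # Routing fields are URL-encoded into a single header value.
        value = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in pairs)
        return ("x-goog-request-params", value)

    print(to_routing_metadata(
        (("encryption_config.name",
          "organizations/o/locations/l/encryptionConfigs/default"),)))
    # ('x-goog-request-params',
    #  'encryption_config.name=organizations%2Fo%2Flocations%2Fl%2F...')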
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.DeleteEncryptionConfigRequest): - request = cmek.DeleteEncryptionConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_encryption_configs(self, - request: Optional[Union[cmek.ListEncryptionConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEncryptionConfigsAsyncPager: - r"""List EncryptionConfigs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_encryption_configs(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEncryptionConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_encryption_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest, dict]]): - The request object. List EncryptionConfigs Request - parent (:class:`str`): - Required. The location for which the - EncryptionConfig is to be listed. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsAsyncPager: - List EncryptionConfigs Response - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.ListEncryptionConfigsRequest): - request = cmek.ListEncryptionConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_encryption_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEncryptionConfigsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
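The async pager returned by ``list_encryption_configs`` above hides page boundaries behind ``async for``; the iteration pattern in miniature, with toy pages in place of real RPCs:

.. code-block:: python

    import asyncio

    async def iterate_items(pages):
        # One RPC per page in the real pager; here, canned data.
        for page in pages:
            for item in page:
                yield item

    async def main():
        async for name in iterate_items([["cfg-a", "cfg-b"], ["cfg-c"]]):
            print(name)

    asyncio.run(main())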
- return response - - async def get_encryption_config(self, - request: Optional[Union[cmek.GetEncryptionConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cmek.EncryptionConfig: - r"""Get an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_encryption_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEncryptionConfigRequest, dict]]): - The request object. Get EncryptionConfig Request - name (:class:`str`): - Required. The name of the - EncryptionConfig to fetch. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EncryptionConfig: - A Resource designed to manage - encryption configurations for customers - to support Customer Managed Encryption - Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.GetEncryptionConfigRequest): - request = cmek.GetEncryptionConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "CmekServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "CmekServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py deleted file mode 100644 index 71a106c85fda..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/client.py +++ /dev/null @@ -1,1592 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
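The ``CmekServiceClientMeta`` metaclass defined in the deleted ``client.py`` that follows keeps an ordered transport registry in which the first entry doubles as the default; its lookup logic in miniature (placeholder strings stand in for the transport classes):

.. code-block:: python

    from collections import OrderedDict

    _registry = OrderedDict()
    _registry["grpc"] = "GrpcTransport"          # placeholder names
    _registry["grpc_asyncio"] = "AsyncTransport"
    _registry["rest"] = "RestTransport"

    def get_transport_class(label=None):
        if label:
            return _registry[label]
        # No label requested: fall back to the first registered transport.
        return next(iter(_registry.values()))

    print(get_transport_class())        # GrpcTransport
    print(get_transport_class("rest"))  # RestTransport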
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.cmek_service import pagers -from google.cloud.dataplex_v1.types import cmek -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CmekServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CmekServiceGrpcTransport -from .transports.grpc_asyncio import CmekServiceGrpcAsyncIOTransport -from .transports.rest import CmekServiceRestTransport - - -class CmekServiceClientMeta(type): - """Metaclass for the CmekService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CmekServiceTransport]] - _transport_registry["grpc"] = CmekServiceGrpcTransport - _transport_registry["grpc_asyncio"] = CmekServiceGrpcAsyncIOTransport - _transport_registry["rest"] = CmekServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CmekServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - -class CmekServiceClient(metaclass=CmekServiceClientMeta): - """Dataplex Universal Catalog Customer Managed Encryption Keys - (CMEK) Service - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CmekServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CmekServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CmekServiceTransport: - """Returns the transport used by the client instance. - - Returns: - CmekServiceTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def encryption_config_path(organization: str,location: str,encryption_config: str,) -> str: - """Returns a fully-qualified encryption_config string.""" - return "organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config}".format(organization=organization, location=location, encryption_config=encryption_config, ) - - @staticmethod - def parse_encryption_config_path(path: str) -> Dict[str,str]: - """Parses a encryption_config path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/encryptionConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def organization_location_path(organization: str,location: str,) -> str: - """Returns a fully-qualified organization_location string.""" - return "organizations/{organization}/locations/{location}".format(organization=organization, location=location, ) - - @staticmethod - def parse_organization_location_path(path: str) -> Dict[str,str]: - """Parses a organization_location path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"].
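A minimal sketch of how these variables steer a client, assuming default credentials are available (the values shown are examples, not defaults):

.. code-block:: python

    import os

    # Read at client construction time by _read_environment_variables().
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"  # force the plain endpoint
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"

    from google.cloud import dataplex_v1

    client = dataplex_v1.CmekServiceClient()
    # With "never", the client targets dataplex.googleapis.com rather than
    # dataplex.mtls.googleapis.com, even if a client certificate exists.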
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = CmekServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = CmekServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = CmekServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CmekServiceTransport, Callable[..., CmekServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cmek service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,CmekServiceTransport,Callable[..., CmekServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the CmekServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CmekServiceClient._read_environment_variables() - self._client_cert_source = CmekServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = CmekServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, CmekServiceTransport) - if transport_provided: - # transport is a CmekServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(CmekServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - CmekServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[CmekServiceTransport], Callable[..., CmekServiceTransport]] = ( - CmekServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., CmekServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.CmekServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "credentialsType": None, - } - ) - - def create_encryption_config(self, - request: Optional[Union[cmek.CreateEncryptionConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - encryption_config: Optional[cmek.EncryptionConfig] = None, - encryption_config_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEncryptionConfigRequest( - parent="parent_value", - encryption_config_id="encryption_config_id_value", - ) - - # Make the request - operation = client.create_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest, dict]): - The request object. Create EncryptionConfig Request - parent (str): - Required. The location at which the - EncryptionConfig is to be created. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): - Required. The EncryptionConfig to - create. - - This corresponds to the ``encryption_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encryption_config_id (str): - Required. The ID of the - [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] - to create. Currently, only a value of "default" is - supported. - - This corresponds to the ``encryption_config_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to - support Customer Managed Encryption Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, encryption_config, encryption_config_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.CreateEncryptionConfigRequest): - request = cmek.CreateEncryptionConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
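# Illustrative (editor's sketch, hypothetical resource names): the two
# mutually exclusive calling styles this check enforces are
#
#     client.create_encryption_config(
#         request=dataplex_v1.CreateEncryptionConfigRequest(
#             parent="organizations/my-org/locations/us-central1",
#             encryption_config_id="default",
#         )
#     )
#
# and the flattened form
#
#     client.create_encryption_config(
#         parent="organizations/my-org/locations/us-central1",
#         encryption_config_id="default",
#     )
#
# Passing both `request` and flattened fields raises the ValueError above.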
- if parent is not None: - request.parent = parent - if encryption_config is not None: - request.encryption_config = encryption_config - if encryption_config_id is not None: - request.encryption_config_id = encryption_config_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cmek.EncryptionConfig, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_encryption_config(self, - request: Optional[Union[cmek.UpdateEncryptionConfigRequest, dict]] = None, - *, - encryption_config: Optional[cmek.EncryptionConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEncryptionConfigRequest( - ) - - # Make the request - operation = client.update_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest, dict]): - The request object. Update EncryptionConfig Request - encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): - Required. The EncryptionConfig to - update. - - This corresponds to the ``encryption_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. - The service treats an omitted field mask - as an implied field mask equivalent to - all fields that are populated (have a - non-empty value). - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.EncryptionConfig` A Resource designed to manage encryption configurations for customers to - support Customer Managed Encryption Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [encryption_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.UpdateEncryptionConfigRequest): - request = cmek.UpdateEncryptionConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if encryption_config is not None: - request.encryption_config = encryption_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("encryption_config.name", request.encryption_config.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cmek.EncryptionConfig, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_encryption_config(self, - request: Optional[Union[cmek.DeleteEncryptionConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest, dict]): - The request object. Delete EncryptionConfig Request - name (str): - Required. The name of the - EncryptionConfig to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.DeleteEncryptionConfigRequest): - request = cmek.DeleteEncryptionConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
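# Editor's note (illustrative): the wrapping below returns a
# google.api_core.operation.Operation future rather than the raw LRO proto.
# A typical caller blocks on it, e.g.:
#
#     operation = client.delete_encryption_config(name="...")  # name elided
#     operation.result(timeout=300)  # waits; returns empty_pb2.Empty here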
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_encryption_configs(self, - request: Optional[Union[cmek.ListEncryptionConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEncryptionConfigsPager: - r"""List EncryptionConfigs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_encryption_configs(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEncryptionConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_encryption_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest, dict]): - The request object. List EncryptionConfigs Request - parent (str): - Required. The location for which the - EncryptionConfig is to be listed. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsPager: - List EncryptionConfigs Response - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, cmek.ListEncryptionConfigsRequest): - request = cmek.ListEncryptionConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_encryption_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEncryptionConfigsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_encryption_config(self, - request: Optional[Union[cmek.GetEncryptionConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cmek.EncryptionConfig: - r"""Get an EncryptionConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_encryption_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEncryptionConfigRequest, dict]): - The request object. Get EncryptionConfig Request - name (str): - Required. The name of the - EncryptionConfig to fetch. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.EncryptionConfig: - A Resource designed to manage - encryption configurations for customers - to support Customer Managed Encryption - Keys (CMEK). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, cmek.GetEncryptionConfigRequest): - request = cmek.GetEncryptionConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_encryption_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CmekServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
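# Usage sketch for these LRO mixin methods (illustrative operation name;
# dict requests are expanded into the proto via keyword expansion):
#
#     op = client.get_operation({"name": "projects/p/locations/l/operations/op-123"})
#     client.cancel_operation({"name": op.name})  # best-effort cancel
#     client.delete_operation({"name": op.name})  # stop tracking the result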
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
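# Illustrative: because locations_pb2 types are plain protobufs (not
# proto-plus), a dict request is accepted and expanded below, so both of
# these are equivalent (hypothetical resource name):
#
#     client.get_location({"name": "projects/my-proj/locations/us-central1"})
#     client.get_location(locations_pb2.GetLocationRequest(
#         name="projects/my-proj/locations/us-central1"))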
- if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "CmekServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py deleted file mode 100644 index 093f10c95405..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import cmek - - -class ListEncryptionConfigsPager: - """A pager for iterating through ``list_encryption_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``encryption_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEncryptionConfigs`` requests and continue to iterate - through the ``encryption_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cmek.ListEncryptionConfigsResponse], - request: cmek.ListEncryptionConfigsRequest, - response: cmek.ListEncryptionConfigsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest): - The initial request object. 
- response (google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = cmek.ListEncryptionConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cmek.ListEncryptionConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cmek.EncryptionConfig]: - for page in self.pages: - yield from page.encryption_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEncryptionConfigsAsyncPager: - """A pager for iterating through ``list_encryption_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``encryption_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEncryptionConfigs`` requests and continue to iterate - through the ``encryption_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cmek.ListEncryptionConfigsResponse]], - request: cmek.ListEncryptionConfigsRequest, - response: cmek.ListEncryptionConfigsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEncryptionConfigsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = cmek.ListEncryptionConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cmek.ListEncryptionConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cmek.EncryptionConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.encryption_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst deleted file mode 100644 index 911db2245cc9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`CmekServiceTransport` is the ABC for all transports. -- public child `CmekServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `CmekServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseCmekServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `CmekServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py deleted file mode 100644 index 66f1072837c6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CmekServiceTransport -from .grpc import CmekServiceGrpcTransport -from .grpc_asyncio import CmekServiceGrpcAsyncIOTransport -from .rest import CmekServiceRestTransport -from .rest import CmekServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CmekServiceTransport]] -_transport_registry['grpc'] = CmekServiceGrpcTransport -_transport_registry['grpc_asyncio'] = CmekServiceGrpcAsyncIOTransport -_transport_registry['rest'] = CmekServiceRestTransport - -__all__ = ( - 'CmekServiceTransport', - 'CmekServiceGrpcTransport', - 'CmekServiceGrpcAsyncIOTransport', - 'CmekServiceRestTransport', - 'CmekServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py deleted file mode 100644 index 26a858fe4793..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/base.py +++ /dev/null @@ -1,307 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import cmek -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class CmekServiceTransport(abc.ABC): - """Abstract transport class for CmekService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
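# A standalone rendering of the registry-lookup idea used above: the client
# metaclass resolves the ``transport`` string to a class, falling back to the
# first registered entry. Names here are stand-ins, not the real transports.
from collections import OrderedDict
from typing import Dict, Optional, Type


class _StubTransport:
    """Illustrative placeholder for a transport class."""


_registry: Dict[str, Type[_StubTransport]] = OrderedDict(
    grpc=_StubTransport,
    grpc_asyncio=_StubTransport,
    rest=_StubTransport,
)


def get_transport_class(label: Optional[str] = None) -> Type[_StubTransport]:
    if label:
        return _registry[label]
    # No label given: default to the first transport in the registry.
    return next(iter(_registry.values()))


assert get_transport_class("rest") is _StubTransport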
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_encryption_config: gapic_v1.method.wrap_method( - self.create_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.update_encryption_config: gapic_v1.method.wrap_method( - self.update_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_encryption_config: gapic_v1.method.wrap_method( - self.delete_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.list_encryption_configs: gapic_v1.method.wrap_method( - self.list_encryption_configs, - default_timeout=None, - client_info=client_info, - ), - self.get_encryption_config: gapic_v1.method.wrap_method( - self.get_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_encryption_config(self) -> Callable[ - [cmek.CreateEncryptionConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_encryption_config(self) -> Callable[ - [cmek.UpdateEncryptionConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_encryption_config(self) -> Callable[ - [cmek.DeleteEncryptionConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_encryption_configs(self) -> Callable[ - [cmek.ListEncryptionConfigsRequest], - Union[ - cmek.ListEncryptionConfigsResponse, - Awaitable[cmek.ListEncryptionConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def get_encryption_config(self) -> Callable[ - [cmek.GetEncryptionConfigRequest], - Union[ - cmek.EncryptionConfig, - Awaitable[cmek.EncryptionConfig] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - 
[operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'CmekServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py deleted file mode 100644 index 21a9803af183..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py +++ /dev/null @@ -1,581 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
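# _prep_wrapped_messages above pre-binds per-RPC defaults so every call goes
# through one uniform wrapper. A hedged toy showing what
# gapic_v1.method.wrap_method layers onto a callable; ``echo`` stands in for a
# transport stub, and the exact kwargs injected can vary with the installed
# google-api-core version.
from google.api_core import gapic_v1


def echo(request, *, timeout=None, metadata=()):
    # A real stub would send ``request`` over the wire.
    return {"request": request, "timeout": timeout, "metadata": list(metadata)}


wrapped = gapic_v1.method.wrap_method(
    echo,
    default_timeout=30.0,  # used when the caller does not pass timeout=
    client_info=gapic_v1.client_info.ClientInfo(),
)

# The default timeout is applied and x-goog-api-client metadata is appended.
print(wrapped({"name": "demo"}))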
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import cmek -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class CmekServiceGrpcTransport(CmekServiceTransport): - """gRPC backend transport for CmekService.
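# The interceptor above serializes payloads only when DEBUG logging is
# enabled, so it costs nothing by default. A hedged sketch of turning it on;
# the GOOGLE_SDK_PYTHON_LOGGING_SCOPE variable is honored by recent
# google-api-core releases (the guarded ``client_logging`` import above), so
# treat it as version-dependent.
import logging
import os

# Scope-based opt-in, evaluated when the client library initializes logging.
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.dataplex_v1"

# Plain stdlib alternative: the interceptor checks isEnabledFor(DEBUG) on the
# module logger, which inherits this level through the logger hierarchy.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)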
- - Dataplex Universal Catalog Customer Managed Encryption Keys - (CMEK) Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_encryption_config(self) -> Callable[ - [cmek.CreateEncryptionConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the create encryption config method over gRPC. - - Create an EncryptionConfig. - - Returns: - Callable[[~.CreateEncryptionConfigRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_encryption_config' not in self._stubs: - self._stubs['create_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/CreateEncryptionConfig', - request_serializer=cmek.CreateEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_encryption_config'] - - @property - def update_encryption_config(self) -> Callable[ - [cmek.UpdateEncryptionConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the update encryption config method over gRPC. - - Update an EncryptionConfig. - - Returns: - Callable[[~.UpdateEncryptionConfigRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
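# create_channel above is also the extension point for supplying a pre-built
# channel. A hedged sketch of the standard pattern; the channel options are
# illustrative, and application default credentials are assumed when none are
# passed explicitly.
from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.cmek_service.transports import (
    CmekServiceGrpcTransport,
)

channel = CmekServiceGrpcTransport.create_channel(
    "dataplex.googleapis.com",
    options=[("grpc.keepalive_time_ms", 30000)],  # forwarded to channel creation
)
# Passing a channel makes the transport ignore credential arguments, per the
# isinstance(channel, grpc.Channel) branch in __init__ above.
transport = CmekServiceGrpcTransport(channel=channel)
client = dataplex_v1.CmekServiceClient(transport=transport)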
- if 'update_encryption_config' not in self._stubs: - self._stubs['update_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/UpdateEncryptionConfig', - request_serializer=cmek.UpdateEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_encryption_config'] - - @property - def delete_encryption_config(self) -> Callable[ - [cmek.DeleteEncryptionConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete encryption config method over gRPC. - - Delete an EncryptionConfig. - - Returns: - Callable[[~.DeleteEncryptionConfigRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_encryption_config' not in self._stubs: - self._stubs['delete_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/DeleteEncryptionConfig', - request_serializer=cmek.DeleteEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_encryption_config'] - - @property - def list_encryption_configs(self) -> Callable[ - [cmek.ListEncryptionConfigsRequest], - cmek.ListEncryptionConfigsResponse]: - r"""Return a callable for the list encryption configs method over gRPC. - - List EncryptionConfigs. - - Returns: - Callable[[~.ListEncryptionConfigsRequest], - ~.ListEncryptionConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_encryption_configs' not in self._stubs: - self._stubs['list_encryption_configs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/ListEncryptionConfigs', - request_serializer=cmek.ListEncryptionConfigsRequest.serialize, - response_deserializer=cmek.ListEncryptionConfigsResponse.deserialize, - ) - return self._stubs['list_encryption_configs'] - - @property - def get_encryption_config(self) -> Callable[ - [cmek.GetEncryptionConfigRequest], - cmek.EncryptionConfig]: - r"""Return a callable for the get encryption config method over gRPC. - - Get an EncryptionConfig. - - Returns: - Callable[[~.GetEncryptionConfigRequest], - ~.EncryptionConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_encryption_config' not in self._stubs: - self._stubs['get_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/GetEncryptionConfig', - request_serializer=cmek.GetEncryptionConfigRequest.serialize, - response_deserializer=cmek.EncryptionConfig.deserialize, - ) - return self._stubs['get_encryption_config'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'CmekServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py deleted file mode 100644 index 47e4dc2aadaf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,652 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import cmek -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import CmekServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class
CmekServiceGrpcAsyncIOTransport(CmekServiceTransport): - """gRPC AsyncIO backend transport for CmekService. - - Dataplex Universal Catalog Customer Managed Encryption Keys - (CMEK) Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_encryption_config(self) -> Callable[ - [cmek.CreateEncryptionConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create encryption config method over gRPC. - - Create an EncryptionConfig. - - Returns: - Callable[[~.CreateEncryptionConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
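# The branch above converts a client_cert_source_for_mtls callback into
# channel-level SSL credentials. A hedged sketch of supplying such a
# callback; the file paths are illustrative and both PEM blobs must be bytes.
from typing import Tuple

import grpc


def my_cert_source() -> Tuple[bytes, bytes]:
    with open("client_cert.pem", "rb") as cert_file:
        cert = cert_file.read()
    with open("client_key.pem", "rb") as key_file:
        key = key_file.read()
    return cert, key


# Equivalent to the transport's internal handling of the callback; in
# practice you would pass client_cert_source_for_mtls=my_cert_source to the
# transport constructor instead.
cert, key = my_cert_source()
ssl_creds = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)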
- if 'create_encryption_config' not in self._stubs: - self._stubs['create_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/CreateEncryptionConfig', - request_serializer=cmek.CreateEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_encryption_config'] - - @property - def update_encryption_config(self) -> Callable[ - [cmek.UpdateEncryptionConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update encryption config method over gRPC. - - Update an EncryptionConfig. - - Returns: - Callable[[~.UpdateEncryptionConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_encryption_config' not in self._stubs: - self._stubs['update_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/UpdateEncryptionConfig', - request_serializer=cmek.UpdateEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_encryption_config'] - - @property - def delete_encryption_config(self) -> Callable[ - [cmek.DeleteEncryptionConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete encryption config method over gRPC. - - Delete an EncryptionConfig. - - Returns: - Callable[[~.DeleteEncryptionConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_encryption_config' not in self._stubs: - self._stubs['delete_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/DeleteEncryptionConfig', - request_serializer=cmek.DeleteEncryptionConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_encryption_config'] - - @property - def list_encryption_configs(self) -> Callable[ - [cmek.ListEncryptionConfigsRequest], - Awaitable[cmek.ListEncryptionConfigsResponse]]: - r"""Return a callable for the list encryption configs method over gRPC. - - List EncryptionConfigs. - - Returns: - Callable[[~.ListEncryptionConfigsRequest], - Awaitable[~.ListEncryptionConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_encryption_configs' not in self._stubs: - self._stubs['list_encryption_configs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/ListEncryptionConfigs', - request_serializer=cmek.ListEncryptionConfigsRequest.serialize, - response_deserializer=cmek.ListEncryptionConfigsResponse.deserialize, - ) - return self._stubs['list_encryption_configs'] - - @property - def get_encryption_config(self) -> Callable[ - [cmek.GetEncryptionConfigRequest], - Awaitable[cmek.EncryptionConfig]]: - r"""Return a callable for the get encryption config method over gRPC. - - Get an EncryptionConfig. - - Returns: - Callable[[~.GetEncryptionConfigRequest], - Awaitable[~.EncryptionConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_encryption_config' not in self._stubs: - self._stubs['get_encryption_config'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.CmekService/GetEncryptionConfig', - request_serializer=cmek.GetEncryptionConfigRequest.serialize, - response_deserializer=cmek.EncryptionConfig.deserialize, - ) - return self._stubs['get_encryption_config'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_encryption_config: self._wrap_method( - self.create_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.update_encryption_config: self._wrap_method( - self.update_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_encryption_config: self._wrap_method( - self.delete_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.list_encryption_configs: self._wrap_method( - self.list_encryption_configs, - default_timeout=None, - client_info=client_info, - ), - self.get_encryption_config: self._wrap_method( - self.get_encryption_config, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "delete_operation" not in self._stubs:
- self._stubs["delete_operation"] = self._logged_channel.unary_unary(
- "/google.longrunning.Operations/DeleteOperation",
- request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
- response_deserializer=None,
- )
- return self._stubs["delete_operation"]
-
- @property
- def cancel_operation(
- self,
- ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
- r"""Return a callable for the cancel_operation method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
- "/google.longrunning.Operations/CancelOperation",
- request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
- response_deserializer=None,
- )
- return self._stubs["cancel_operation"]
-
- @property
- def get_operation(
- self,
- ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
- r"""Return a callable for the get_operation method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self._logged_channel.unary_unary(
- "/google.longrunning.Operations/GetOperation",
- request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs["get_operation"]
-
- @property
- def list_operations(
- self,
- ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
- r"""Return a callable for the list_operations method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self._logged_channel.unary_unary(
- "/google.longrunning.Operations/ListOperations",
- request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
- response_deserializer=operations_pb2.ListOperationsResponse.FromString,
- )
- return self._stubs["list_operations"]
-
- @property
- def list_locations(
- self,
- ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
- r"""Return a callable for the list locations method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "list_locations" not in self._stubs:
- self._stubs["list_locations"] = self._logged_channel.unary_unary(
- "/google.cloud.location.Locations/ListLocations",
- request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
- response_deserializer=locations_pb2.ListLocationsResponse.FromString,
- )
- return self._stubs["list_locations"]
-
- @property
- def get_location(
- self,
- ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
- r"""Return a callable for the get location method over gRPC.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'CmekServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py deleted file mode 100644 index ecfbd2a2a163..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py +++ /dev/null @@ -1,1907 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import cmek -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseCmekServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class 
CmekServiceRestInterceptor:
- """Interceptor for CmekService.
-
- Interceptors are used to manipulate requests, request metadata, and responses
- in arbitrary ways.
- Example use cases include:
- * Logging
- * Verifying requests according to service or custom semantics
- * Stripping extraneous information from responses
-
- These use cases and more can be enabled by injecting an
- instance of a custom subclass when constructing the CmekServiceRestTransport.
-
- .. code-block:: python
- class MyCustomCmekServiceInterceptor(CmekServiceRestInterceptor):
- def pre_create_encryption_config(self, request, metadata):
- logging.debug(f"Received request: {request}")
- return request, metadata
-
- def post_create_encryption_config(self, response):
- logging.debug(f"Received response: {response}")
- return response
-
- def pre_delete_encryption_config(self, request, metadata):
- logging.debug(f"Received request: {request}")
- return request, metadata
-
- def post_delete_encryption_config(self, response):
- logging.debug(f"Received response: {response}")
- return response
-
- def pre_get_encryption_config(self, request, metadata):
- logging.debug(f"Received request: {request}")
- return request, metadata
-
- def post_get_encryption_config(self, response):
- logging.debug(f"Received response: {response}")
- return response
-
- def pre_list_encryption_configs(self, request, metadata):
- logging.debug(f"Received request: {request}")
- return request, metadata
-
- def post_list_encryption_configs(self, response):
- logging.debug(f"Received response: {response}")
- return response
-
- def pre_update_encryption_config(self, request, metadata):
- logging.debug(f"Received request: {request}")
- return request, metadata
-
- def post_update_encryption_config(self, response):
- logging.debug(f"Received response: {response}")
- return response
-
- transport = CmekServiceRestTransport(interceptor=MyCustomCmekServiceInterceptor())
- client = CmekServiceClient(transport=transport)
-
-
- """
- def pre_create_encryption_config(self, request: cmek.CreateEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.CreateEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
- """Pre-rpc interceptor for create_encryption_config
-
- Override in a subclass to manipulate the request or metadata
- before they are sent to the CmekService server.
- """
- return request, metadata
-
- def post_create_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
- """Post-rpc interceptor for create_encryption_config
-
- DEPRECATED. Please use the `post_create_encryption_config_with_metadata`
- interceptor instead.
-
- Override in a subclass to read or manipulate the response
- after it is returned by the CmekService server but before
- it is returned to user code. This `post_create_encryption_config` interceptor runs
- before the `post_create_encryption_config_with_metadata` interceptor.
- """
- return response
-
- def post_create_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
- """Post-rpc interceptor for create_encryption_config
-
- Override in a subclass to read or manipulate the response or metadata after it
- is returned by the CmekService server but before it is returned to user code.
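-
- For example, a subclass can read the server-returned metadata alongside
- the operation (a minimal, hypothetical sketch):
-
- .. code-block:: python
- def post_create_encryption_config_with_metadata(self, response, metadata):
- logging.debug(f"LRO {response.name} metadata: {metadata}")
- return response, metadata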
- - We recommend only using this `post_create_encryption_config_with_metadata` - interceptor in new development instead of the `post_create_encryption_config` interceptor. - When both interceptors are used, this `post_create_encryption_config_with_metadata` interceptor runs after the - `post_create_encryption_config` interceptor. The (possibly modified) response returned by - `post_create_encryption_config` will be passed to - `post_create_encryption_config_with_metadata`. - """ - return response, metadata - - def pre_delete_encryption_config(self, request: cmek.DeleteEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.DeleteEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_encryption_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_delete_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_encryption_config - - DEPRECATED. Please use the `post_delete_encryption_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. This `post_delete_encryption_config` interceptor runs - before the `post_delete_encryption_config_with_metadata` interceptor. - """ - return response - - def post_delete_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_encryption_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CmekService server but before it is returned to user code. - - We recommend only using this `post_delete_encryption_config_with_metadata` - interceptor in new development instead of the `post_delete_encryption_config` interceptor. - When both interceptors are used, this `post_delete_encryption_config_with_metadata` interceptor runs after the - `post_delete_encryption_config` interceptor. The (possibly modified) response returned by - `post_delete_encryption_config` will be passed to - `post_delete_encryption_config_with_metadata`. - """ - return response, metadata - - def pre_get_encryption_config(self, request: cmek.GetEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.GetEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_encryption_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_get_encryption_config(self, response: cmek.EncryptionConfig) -> cmek.EncryptionConfig: - """Post-rpc interceptor for get_encryption_config - - DEPRECATED. Please use the `post_get_encryption_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. This `post_get_encryption_config` interceptor runs - before the `post_get_encryption_config_with_metadata` interceptor. 
- """ - return response - - def post_get_encryption_config_with_metadata(self, response: cmek.EncryptionConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.EncryptionConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_encryption_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CmekService server but before it is returned to user code. - - We recommend only using this `post_get_encryption_config_with_metadata` - interceptor in new development instead of the `post_get_encryption_config` interceptor. - When both interceptors are used, this `post_get_encryption_config_with_metadata` interceptor runs after the - `post_get_encryption_config` interceptor. The (possibly modified) response returned by - `post_get_encryption_config` will be passed to - `post_get_encryption_config_with_metadata`. - """ - return response, metadata - - def pre_list_encryption_configs(self, request: cmek.ListEncryptionConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.ListEncryptionConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_encryption_configs - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_list_encryption_configs(self, response: cmek.ListEncryptionConfigsResponse) -> cmek.ListEncryptionConfigsResponse: - """Post-rpc interceptor for list_encryption_configs - - DEPRECATED. Please use the `post_list_encryption_configs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. This `post_list_encryption_configs` interceptor runs - before the `post_list_encryption_configs_with_metadata` interceptor. - """ - return response - - def post_list_encryption_configs_with_metadata(self, response: cmek.ListEncryptionConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.ListEncryptionConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_encryption_configs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CmekService server but before it is returned to user code. - - We recommend only using this `post_list_encryption_configs_with_metadata` - interceptor in new development instead of the `post_list_encryption_configs` interceptor. - When both interceptors are used, this `post_list_encryption_configs_with_metadata` interceptor runs after the - `post_list_encryption_configs` interceptor. The (possibly modified) response returned by - `post_list_encryption_configs` will be passed to - `post_list_encryption_configs_with_metadata`. - """ - return response, metadata - - def pre_update_encryption_config(self, request: cmek.UpdateEncryptionConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cmek.UpdateEncryptionConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_encryption_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_update_encryption_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_encryption_config - - DEPRECATED. 
Please use the `post_update_encryption_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. This `post_update_encryption_config` interceptor runs - before the `post_update_encryption_config_with_metadata` interceptor. - """ - return response - - def post_update_encryption_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_encryption_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the CmekService server but before it is returned to user code. - - We recommend only using this `post_update_encryption_config_with_metadata` - interceptor in new development instead of the `post_update_encryption_config` interceptor. - When both interceptors are used, this `post_update_encryption_config_with_metadata` interceptor runs after the - `post_update_encryption_config` interceptor. The (possibly modified) response returned by - `post_update_encryption_config` will be passed to - `post_update_encryption_config_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CmekService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the CmekService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CmekServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CmekServiceRestInterceptor - - -class CmekServiceRestTransport(_BaseCmekServiceRestTransport): - """REST backend synchronous transport for CmekService. - - Dataplex Universal Catalog Customer Managed Encryption Keys - (CMEK) Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CmekServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
-
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
- certificate to configure mutual TLS HTTP channel. It is ignored
- if ``channel`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- url_scheme=url_scheme,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or CmekServiceRestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- @property
- def operations_client(self) -> operations_v1.AbstractOperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Only create a new client if we do not already have one.
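- # The HTTP rules for the google.longrunning mixin RPCs are declared
- # inline below and handed to a dedicated OperationsRestTransport, which
- # backs the cached AbstractOperationsClient.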
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateEncryptionConfig(_BaseCmekServiceRestTransport._BaseCreateEncryptionConfig, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.CreateEncryptionConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: cmek.CreateEncryptionConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create encryption config method over HTTP. - - Args: - request (~.cmek.CreateEncryptionConfigRequest): - The request object. Create EncryptionConfig Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_encryption_config(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_transcoded_request(http_options, request) - - body = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.CreateEncryptionConfig", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "CreateEncryptionConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._CreateEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_encryption_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_encryption_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceClient.create_encryption_config", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "CreateEncryptionConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEncryptionConfig(_BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.DeleteEncryptionConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: cmek.DeleteEncryptionConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete encryption config method over HTTP. - - Args: - request (~.cmek.DeleteEncryptionConfigRequest): - The request object. Delete EncryptionConfig Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_http_options() - - request, metadata = self._interceptor.pre_delete_encryption_config(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.DeleteEncryptionConfig", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "DeleteEncryptionConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._DeleteEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
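- # (from_http_response maps the status code and error payload to the
- # matching exception subclass, e.g. NotFound or PermissionDenied.)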
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_encryption_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_encryption_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceClient.delete_encryption_config", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "DeleteEncryptionConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEncryptionConfig(_BaseCmekServiceRestTransport._BaseGetEncryptionConfig, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.GetEncryptionConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: cmek.GetEncryptionConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cmek.EncryptionConfig: - r"""Call the get encryption config method over HTTP. - - Args: - request (~.cmek.GetEncryptionConfigRequest): - The request object. Get EncryptionConfig Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.cmek.EncryptionConfig: - A Resource designed to manage - encryption configurations for customers - to support Customer Managed Encryption - Keys (CMEK). 
- - """ - - http_options = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_encryption_config(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetEncryptionConfig", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetEncryptionConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._GetEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cmek.EncryptionConfig() - pb_resp = cmek.EncryptionConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_encryption_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_encryption_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = cmek.EncryptionConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceClient.get_encryption_config", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetEncryptionConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEncryptionConfigs(_BaseCmekServiceRestTransport._BaseListEncryptionConfigs, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.ListEncryptionConfigs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: cmek.ListEncryptionConfigsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cmek.ListEncryptionConfigsResponse: 
- r"""Call the list encryption configs method over HTTP. - - Args: - request (~.cmek.ListEncryptionConfigsRequest): - The request object. List EncryptionConfigs Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.cmek.ListEncryptionConfigsResponse: - List EncryptionConfigs Response - """ - - http_options = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_http_options() - - request, metadata = self._interceptor.pre_list_encryption_configs(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListEncryptionConfigs", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListEncryptionConfigs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._ListEncryptionConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cmek.ListEncryptionConfigsResponse() - pb_resp = cmek.ListEncryptionConfigsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_encryption_configs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_encryption_configs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = cmek.ListEncryptionConfigsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceClient.list_encryption_configs", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListEncryptionConfigs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateEncryptionConfig(_BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.UpdateEncryptionConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: cmek.UpdateEncryptionConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update encryption config method over HTTP. - - Args: - request (~.cmek.UpdateEncryptionConfigRequest): - The request object. Update EncryptionConfig Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_encryption_config(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_transcoded_request(http_options, request) - - body = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.UpdateEncryptionConfig", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "UpdateEncryptionConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._UpdateEncryptionConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_encryption_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_encryption_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceClient.update_encryption_config", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "UpdateEncryptionConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_encryption_config(self) -> Callable[ - [cmek.CreateEncryptionConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_encryption_config(self) -> Callable[ - [cmek.DeleteEncryptionConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_encryption_config(self) -> Callable[ - [cmek.GetEncryptionConfigRequest], - cmek.EncryptionConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_encryption_configs(self) -> Callable[ - [cmek.ListEncryptionConfigsRequest], - cmek.ListEncryptionConfigsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListEncryptionConfigs(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_encryption_config(self) -> Callable[ - [cmek.UpdateEncryptionConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateEncryptionConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseCmekServiceRestTransport._BaseGetLocation, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseCmekServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseCmekServiceRestTransport._BaseListLocations, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseCmekServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseCmekServiceRestTransport._BaseCancelOperation, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseCmekServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseCmekServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseCmekServiceRestTransport._BaseDeleteOperation, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseCmekServiceRestTransport._BaseGetOperation, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseCmekServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseCmekServiceRestTransport._BaseListOperations, CmekServiceRestStub): - def __hash__(self): - return hash("CmekServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseCmekServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCmekServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseCmekServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.CmekServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = CmekServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.CmekServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.CmekService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'CmekServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py deleted file mode 100644 index b986fd0e4523..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py +++ /dev/null @@ -1,472 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import CmekServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import cmek -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseCmekServiceRestTransport(CmekServiceTransport): - """Base REST backend transport for CmekService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateEncryptionConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "encryptionConfigId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=organizations/*/locations/*}/encryptionConfigs', - 'body': 'encryption_config', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = cmek.CreateEncryptionConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCmekServiceRestTransport._BaseCreateEncryptionConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEncryptionConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/encryptionConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request =
cmek.DeleteEncryptionConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCmekServiceRestTransport._BaseDeleteEncryptionConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEncryptionConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/encryptionConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = cmek.GetEncryptionConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCmekServiceRestTransport._BaseGetEncryptionConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListEncryptionConfigs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=organizations/*/locations/*}/encryptionConfigs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = cmek.ListEncryptionConfigsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCmekServiceRestTransport._BaseListEncryptionConfigs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEncryptionConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{encryption_config.name=organizations/*/locations/*/encryptionConfigs/*}', - 'body': 'encryption_config', - }, - ] - 
return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = cmek.UpdateEncryptionConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseCmekServiceRestTransport._BaseUpdateEncryptionConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseCmekServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py deleted file mode 100644 index ba661ddf8d6b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import ContentServiceClient -from .async_client import ContentServiceAsyncClient - -__all__ = ( - 'ContentServiceClient', - 'ContentServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py deleted file mode 100644 index 22b3c5cd4ccb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/async_client.py +++ /dev/null @@ -1,1497 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport -from .client import ContentServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class ContentServiceAsyncClient: - """ContentService manages Notebook and SQL Scripts for Dataplex - Universal Catalog. - """ - - _client: ContentServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
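# The deprecation note above reflects the GAPIC endpoint-template convention:
# the client builds its endpoint by substituting the configured universe
# domain into a template. A minimal sketch of that resolution, assuming the
# usual generated values (the deleted module defines the authoritative ones
# on ContentServiceClient; these are not quoted from this diff):
#
#   _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"  # assumed value
#   _DEFAULT_UNIVERSE = "googleapis.com"                       # assumed value
#   endpoint = _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE)
#   assert endpoint == "dataplex.googleapis.com"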
- DEFAULT_ENDPOINT = ContentServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ContentServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = ContentServiceClient._DEFAULT_UNIVERSE - - content_path = staticmethod(ContentServiceClient.content_path) - parse_content_path = staticmethod(ContentServiceClient.parse_content_path) - lake_path = staticmethod(ContentServiceClient.lake_path) - parse_lake_path = staticmethod(ContentServiceClient.parse_lake_path) - common_billing_account_path = staticmethod(ContentServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ContentServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ContentServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ContentServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ContentServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ContentServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ContentServiceClient.common_project_path) - parse_common_project_path = staticmethod(ContentServiceClient.parse_common_project_path) - common_location_path = staticmethod(ContentServiceClient.common_location_path) - parse_common_location_path = staticmethod(ContentServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceAsyncClient: The constructed client. - """ - return ContentServiceClient.from_service_account_info.__func__(ContentServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceAsyncClient: The constructed client. - """ - return ContentServiceClient.from_service_account_file.__func__(ContentServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ContentServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ContentServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ContentServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = ContentServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the content service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ContentServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ContentServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.ContentServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "credentialsType": None, - } - ) - - async def create_content(self, - request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - content: Optional[analyze.Content] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Create a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = await client.create_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]]): - The request object. Create content request. - parent (:class:`str`): - Required. 
The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (:class:`google.cloud.dataplex_v1.types.Content`): - Required. Content resource. - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, content] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.CreateContentRequest): - request = gcd_content.CreateContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if content is not None: - request.content = content - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_content(self, - request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, - *, - content: Optional[analyze.Content] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Update a content. Only supports full resource update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = await client.update_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]]): - The request object. Update content request. - content (:class:`google.cloud.dataplex_v1.types.Content`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [content, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.UpdateContentRequest): - request = gcd_content.UpdateContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if content is not None: - request.content = content - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("content.name", request.content.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_content(self, - request: Optional[Union[content.DeleteContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - await client.delete_content(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]]): - The request object. Delete content request. - name (:class:`str`): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.DeleteContentRequest): - request = content.DeleteContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
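- # Note that delete_content returns no payload; the coroutine simply
- # completes (or raises, e.g. NotFound) once the server responds.
- # Illustrative call with a hypothetical resource name:
- #
- #   await client.delete_content(
- #       name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-content")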
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_content(self, - request: Optional[Union[content.GetContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Get a content resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_content(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]]): - The request object. Get content request. - name (:class:`str`): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.GetContentRequest): - request = content.GetContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
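- # Illustrative sketch, not generated code: the ``content_path`` helper on
- # the client class builds the expected resource name (IDs below are
- # hypothetical):
- #
- #   name = client.content_path("my-project", "us-central1", "my-lake", "my-content")
- #   item = await client.get_content(name=name)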
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
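- # For example (hedged sketch; values hypothetical), a plain dict such as
- # {"resource": name, "policy": {"bindings": [...]}} is expanded by the
- # keyword expansion below into
- # iam_policy_pb2.SetIamPolicyRequest(resource=name, policy=...).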
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
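- # Illustrative sketch, not generated code: the response echoes back the
- # subset of requested permissions the caller actually holds (the permission
- # name below is hypothetical):
- #
- #   request = iam_policy_pb2.TestIamPermissionsRequest(
- #       resource=name, permissions=["dataplex.content.get"])
- #   response = await client.test_iam_permissions(request=request)
- #   allowed = set(response.permissions)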
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_content(self, - request: Optional[Union[content.ListContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListContentAsyncPager: - r"""List content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]]): - The request object. List content request. Returns the - BASIC Content view. - parent (:class:`str`): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: - List content response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
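- # Illustrative consequence of the check below (values hypothetical):
- # mixing a request object with a flattened field raises ValueError:
- #
- #   req = dataplex_v1.ListContentRequest(parent="projects/p/locations/l/lakes/lk")
- #   await client.list_content(request=req, parent="...")  # ValueError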
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.ListContentRequest): - request = content.ListContentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListContentAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
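- # Unlike list_content above, this mixin method returns the raw
- # ListOperationsResponse rather than a pager; callers page manually via
- # ``next_page_token``. Illustrative iteration, not generated code:
- #
- #   for op in response.operations:
- #       print(op.name, op.done)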
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
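- # Hedged note, not generated code: the routing header built below becomes
- # a single gRPC metadata entry of the form
- # ("x-goog-request-params", "name=<url-encoded operation name>"), which the
- # backend uses to route the request.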
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
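- # Hedged note, not generated code: ``_wrapped_methods`` maps each raw
- # transport method to a callable pre-configured with the service's default
- # retry and timeout settings.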
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ContentServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "ContentServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py deleted file mode 100644 index 62103cb0645e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/client.py +++ /dev/null @@ -1,1878 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ContentServiceGrpcTransport -from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport -from .transports.rest import ContentServiceRestTransport - - -class ContentServiceClientMeta(type): - """Metaclass for the ContentService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] - _transport_registry["grpc"] = ContentServiceGrpcTransport - _transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport - _transport_registry["rest"] = ContentServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ContentServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
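- # Illustrative sketch, not generated code: callers normally select a
- # transport via the client constructor, but the class can be fetched
- # directly; with no label the first registered transport (gRPC) is used:
- #
- #   transport_cls = ContentServiceClient.get_transport_class("rest")
- #   assert transport_cls is ContentServiceRestTransport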
- if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ContentServiceClient(metaclass=ContentServiceClientMeta): - """ContentService manages Notebook and SQL Scripts for Dataplex - Universal Catalog. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ContentServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ContentServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ContentServiceTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def content_path(project: str,location: str,lake: str,content: str,) -> str: - """Returns a fully-qualified content string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) - - @staticmethod - def parse_content_path(path: str) -> Dict[str,str]: - """Parses a content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/content/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def lake_path(project: str,location: str,lake: str,) -> str: - """Returns a fully-qualified lake string.""" - return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - - @staticmethod - def parse_lake_path(path: str) -> Dict[str,str]: - """Parses a lake path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ContentServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ContentServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = ContentServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ContentServiceTransport, Callable[..., ContentServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the content service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ContentServiceTransport,Callable[..., ContentServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ContentServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
If this property is not set and - ``transport`` is not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if a client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence, and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ContentServiceClient._read_environment_variables() - self._client_cert_source = ContentServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = ContentServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ContentServiceTransport) - if transport_provided: - # transport is a ContentServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(ContentServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - ContentServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[ContentServiceTransport], Callable[..., ContentServiceTransport]] = ( - ContentServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ContentServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.ContentServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "credentialsType": None, - } - ) - - def create_content(self, - request: Optional[Union[gcd_content.CreateContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - content: Optional[analyze.Content] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Create a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.CreateContentRequest( - parent="parent_value", - content=content, - ) - - # Make the request - response = client.create_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): - The request object. Create content request. 
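As the constructor logic above shows, ``transport`` may be given as a registered name, a transport class or callable, or an already-built instance. A sketch of the three spellings, assuming Application Default Credentials are available:

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.content_service import transports

    # 1. By name -- resolved through the transport registry
    #    ("grpc", "grpc_asyncio", or "rest").
    client = dataplex_v1.ContentServiceClient(transport="rest")

    # 2. By class (or any callable) -- invoked with the same keyword
    #    arguments the transport constructor accepts.
    client = dataplex_v1.ContentServiceClient(
        transport=transports.ContentServiceGrpcTransport)

    # 3. By instance -- credentials and scopes must already live on the
    #    transport; passing them to the client as well raises ValueError.
    transport = transports.ContentServiceGrpcTransport()
    client = dataplex_v1.ContentServiceClient(transport=transport)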
- parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - content (google.cloud.dataplex_v1.types.Content): - Required. Content resource. - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, content] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.CreateContentRequest): - request = gcd_content.CreateContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if content is not None: - request.content = content - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_content(self, - request: Optional[Union[gcd_content.UpdateContentRequest, dict]] = None, - *, - content: Optional[analyze.Content] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Update a content. Only supports full resource update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - content = dataplex_v1.Content() - content.data_text = "data_text_value" - content.sql_script.engine = "SPARK" - content.path = "path_value" - - request = dataplex_v1.UpdateContentRequest( - content=content, - ) - - # Make the request - response = client.update_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): - The request object. Update content request. - content (google.cloud.dataplex_v1.types.Content): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``content`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [content, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_content.UpdateContentRequest): - request = gcd_content.UpdateContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if content is not None: - request.content = content - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("content.name", request.content.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
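The ``flattened_params`` guard above makes the ``request`` object and the flattened keyword arguments mutually exclusive. A short sketch of both call styles (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.ContentServiceClient()
    content = dataplex_v1.Content(
        name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-script")
    mask = field_mask_pb2.FieldMask(paths=["data_text"])

    # Either pass a fully-formed request object...
    client.update_content(
        request=dataplex_v1.UpdateContentRequest(content=content, update_mask=mask))

    # ...or the flattened fields...
    client.update_content(content=content, update_mask=mask)

    # ...but never both; mixing them raises ValueError before any RPC is sent.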
- return response - - def delete_content(self, - request: Optional[Union[content.DeleteContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteContentRequest( - name="name_value", - ) - - # Make the request - client.delete_content(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): - The request object. Delete content request. - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.DeleteContentRequest): - request = content.DeleteContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_content(self, - request: Optional[Union[content.GetContentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Content: - r"""Get a content resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetContentRequest( - name="name_value", - ) - - # Make the request - response = client.get_content(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): - The request object. Get content request. - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Content: - Content represents a user-visible - notebook or a sql script - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, content.GetContentRequest): - request = content.GetContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
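Because the IAM request types are raw protobuf messages rather than proto-plus wrappers, a dict is coerced via keyword expansion, as the comment above notes. A sketch (the resource name is a placeholder):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2

    resource = "projects/my-project/locations/us-central1/lakes/my-lake/contentitems/my-item"

    # Dict form, expanded with ** into the raw protobuf constructor...
    request = iam_policy_pb2.SetIamPolicyRequest(**{"resource": resource})

    # ...is equivalent to constructing the message directly.
    assert request == iam_policy_pb2.SetIamPolicyRequest(resource=resource)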
- request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_content(self, - request: Optional[Union[content.ListContentRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListContentPager: - r"""List content. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_content(): - # Create a client - client = dataplex_v1.ContentServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListContentRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_content(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): - The request object. List content request. Returns the - BASIC Content view. - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: - List content response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, content.ListContentRequest): - request = content.ListContentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListContentPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ContentServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
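``list_content`` wraps the raw response in ``ListContentPager``, so callers can iterate either items or whole pages. A sketch (project and lake names are placeholders):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.ContentServiceClient()
    pager = client.list_content(
        parent="projects/my-project/locations/us-central1/lakes/my-lake")

    # Item-level iteration; additional pages are fetched transparently.
    for item in pager:
        print(item.name)

    # Alternatively, page-level iteration exposes per-page fields.
    for page in pager.pages:
        print(len(page.content), page.next_page_token)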
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
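The ``try``/``except`` pattern used by the operations and locations mixins funnels failures through ``_add_cred_info_for_auth_errors``, which appends a JSON-encoded credential summary to the error details for 401/403/404 responses when google-auth >= 2.35.0 exposes ``get_cred_info``. A sketch of what a caller observes (the operation name is a placeholder):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import dataplex_v1

    client = dataplex_v1.ContentServiceClient()
    try:
        client.get_operation(
            {"name": "projects/my-project/locations/us-central1/operations/op-123"})
    except core_exceptions.GoogleAPICallError as e:
        # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND, details may now include
        # the appended credential info to aid debugging.
        print(e.code, e.details)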
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "ContentServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py deleted file mode 100644 index 2892d71ac16d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/pagers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content - - -class ListContentPager: - """A pager for iterating through ``list_content`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and - provides an ``__iter__`` method to iterate through its - ``content`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListContent`` requests and continue to iterate - through the ``content`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., content.ListContentResponse], - request: content.ListContentRequest, - response: content.ListContentResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListContentRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListContentResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = content.ListContentRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[content.ListContentResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Content]: - for page in self.pages: - yield from page.content - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListContentAsyncPager: - """A pager for iterating through ``list_content`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``content`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListContent`` requests and continue to iterate - through the ``content`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[content.ListContentResponse]], - request: content.ListContentRequest, - response: content.ListContentResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListContentRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListContentResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
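The async pager mirrors the sync one but exposes ``__aiter__``; used with the corresponding ``ContentServiceAsyncClient``, a sketch looks like this (names are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        client = dataplex_v1.ContentServiceAsyncClient()
        pager = await client.list_content(
            parent="projects/my-project/locations/us-central1/lakes/my-lake")
        async for item in pager:  # further pages are awaited on demand
            print(item.name)

    asyncio.run(main())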
- """ - self._method = method - self._request = content.ListContentRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[content.ListContentResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyze.Content]: - async def async_generator(): - async for page in self.pages: - for response in page.content: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst deleted file mode 100644 index f737919bf8e5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ContentServiceTransport` is the ABC for all transports. -- public child `ContentServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ContentServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseContentServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ContentServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py deleted file mode 100644 index f5b74440a74f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ContentServiceTransport -from .grpc import ContentServiceGrpcTransport -from .grpc_asyncio import ContentServiceGrpcAsyncIOTransport -from .rest import ContentServiceRestTransport -from .rest import ContentServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] -_transport_registry['grpc'] = ContentServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ContentServiceGrpcAsyncIOTransport -_transport_registry['rest'] = ContentServiceRestTransport - -__all__ = ( - 'ContentServiceTransport', - 'ContentServiceGrpcTransport', - 'ContentServiceGrpcAsyncIOTransport', - 'ContentServiceRestTransport', - 'ContentServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py deleted file mode 100644 index 1482b56942bd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/base.py +++ /dev/null @@ -1,382 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class ContentServiceTransport(abc.ABC): - """Abstract transport class for ContentService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). 
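[Editor's note] The `OrderedDict` registry deleted above is what lets a caller select a transport by its string label. A sketch of the resolution logic it enables; the `get_transport_class` name and the default-to-first-entry behavior are assumptions about the generated client, not shown in this diff:

```python
from collections import OrderedDict
from typing import Dict, Optional, Type


class Transport: ...                      # stand-ins for the real
class GrpcTransport(Transport): ...       # ContentServiceTransport
class GrpcAsyncIOTransport(Transport): ...  # subclasses
class RestTransport(Transport): ...


_registry: Dict[str, Type[Transport]] = OrderedDict(
    [("grpc", GrpcTransport),
     ("grpc_asyncio", GrpcAsyncIOTransport),
     ("rest", RestTransport)]
)


def get_transport_class(label: Optional[str] = None) -> Type[Transport]:
    # An explicit label ("grpc", "grpc_asyncio", "rest") picks that entry;
    # with no label, fall back to the first registered transport.
    if label:
        return _registry[label]
    return next(iter(_registry.values()))


assert get_transport_class() is GrpcTransport
assert get_transport_class("rest") is RestTransport
```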
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_content: gapic_v1.method.wrap_method( - self.create_content, - default_timeout=60.0, - client_info=client_info, - ), - self.update_content: gapic_v1.method.wrap_method( - self.update_content, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_content: gapic_v1.method.wrap_method( - self.delete_content, - default_timeout=60.0, - client_info=client_info, - ), - self.get_content: gapic_v1.method.wrap_method( - self.get_content, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_content: gapic_v1.method.wrap_method( - self.list_content, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
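[Editor's note] The retry policy wired into `get_content`, `get_iam_policy`, `test_iam_permissions`, and `list_content` above is classic capped exponential backoff: sleeps grow from 1.0s by a factor of 1.3, are capped at 10.0s, and only `ServiceUnavailable` is retried within a 60s deadline. A back-of-the-envelope sketch of the nominal sleep schedule; `google.api_core.retry` randomizes each actual sleep and its deadline bookkeeping differs slightly, both of which this ignores:

```python
def nominal_delays(initial=1.0, maximum=10.0, multiplier=1.3, deadline=60.0):
    # Delays grow geometrically from `initial`, capped at `maximum`;
    # retrying stops once the cumulative budget would exceed `deadline`.
    delay, elapsed, schedule = initial, 0.0, []
    while elapsed + delay <= deadline:
        schedule.append(round(delay, 2))
        elapsed += delay
        delay = min(delay * multiplier, maximum)
    return schedule


print(nominal_delays())
# [1.0, 1.3, 1.69, 2.2, 2.86, 3.71, 4.83, 6.27, 8.16, 10.0, 10.0]
```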
- """ - raise NotImplementedError() - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - Union[ - analyze.Content, - Awaitable[analyze.Content] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - Union[ - content.ListContentResponse, - Awaitable[content.ListContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ContentServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py deleted file mode 100644 index 6e90fda5e2cf..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ /dev/null @@ -1,664 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - 
"status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ContentServiceGrpcTransport(ContentServiceTransport): - """gRPC backend transport for ContentService. - - ContentService manages Notebook and SQL Scripts for Dataplex - Universal Catalog. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. 
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - analyze.Content]: - r"""Return a callable for the create content method over gRPC. - - Create a content. - - Returns: - Callable[[~.CreateContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_content' not in self._stubs: - self._stubs['create_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/CreateContent', - request_serializer=gcd_content.CreateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['create_content'] - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - analyze.Content]: - r"""Return a callable for the update content method over gRPC. - - Update a content. Only supports full resource update. - - Returns: - Callable[[~.UpdateContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
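[Editor's note] Because `__init__` above ignores credentials whenever a ready-made `grpc.Channel` is passed, the transport is easy to point at a local fake for testing. A minimal sketch, assuming a hypothetical insecure test endpoint:

```python
import grpc

from google.cloud.dataplex_v1.services.content_service.transports.grpc import (
    ContentServiceGrpcTransport,
)

# Hypothetical local endpoint; passing a channel instance makes the
# transport skip credential resolution entirely.
channel = grpc.insecure_channel("localhost:8080")
transport = ContentServiceGrpcTransport(channel=channel)
```

The production path instead goes through `create_channel()`, which applies `AUTH_SCOPES` and the `dataplex.googleapis.com:443` default host shown above.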
- if 'update_content' not in self._stubs: - self._stubs['update_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/UpdateContent', - request_serializer=gcd_content.UpdateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['update_content'] - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete content method over gRPC. - - Delete a content. - - Returns: - Callable[[~.DeleteContentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_content' not in self._stubs: - self._stubs['delete_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/DeleteContent', - request_serializer=content.DeleteContentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_content'] - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - analyze.Content]: - r"""Return a callable for the get content method over gRPC. - - Get a content resource. - - Returns: - Callable[[~.GetContentRequest], - ~.Content]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_content' not in self._stubs: - self._stubs['get_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetContent', - request_serializer=content.GetContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['get_content'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. 
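[Editor's note] Since `SetIamPolicy` replaces the entire policy, the usual safe pattern is read-modify-write, so the fetched `etag` guards against lost updates. A sketch with a hypothetical content item and role:

```python
from google.cloud import dataplex_v1
from google.iam.v1 import iam_policy_pb2, policy_pb2

client = dataplex_v1.ContentServiceClient()
# Hypothetical content item resource name.
resource = "projects/p/locations/l/lakes/lake/contentitems/item"

policy = client.get_iam_policy(
    iam_policy_pb2.GetIamPolicyRequest(resource=resource)
)
policy.bindings.append(
    policy_pb2.Binding(role="roles/dataplex.viewer",
                       members=["user:analyst@example.com"])
)
# Sending the fetched policy back preserves its etag, so a concurrent
# modification makes this call fail instead of silently clobbering it.
client.set_iam_policy(
    iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
)
```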
- - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - content.ListContentResponse]: - r"""Return a callable for the list content method over gRPC. - - List content. - - Returns: - Callable[[~.ListContentRequest], - ~.ListContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_content' not in self._stubs: - self._stubs['list_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/ListContent', - request_serializer=content.ListContentRequest.serialize, - response_deserializer=content.ListContentResponse.deserialize, - ) - return self._stubs['list_content'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
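[Editor's note] The fail-open caveat in the `TestIamPermissions` docstring above matters in practice: the call is for deciding what to render, never for enforcement. A sketch of the UI-gating pattern; the resource name and permission strings are illustrative, with only the `dataplex.content.*` prefix attested by the docstrings above:

```python
from google.cloud import dataplex_v1
from google.iam.v1 import iam_policy_pb2

client = dataplex_v1.ContentServiceClient()
response = client.test_iam_permissions(
    request=iam_policy_pb2.TestIamPermissionsRequest(
        # Hypothetical resource and permission names.
        resource="projects/p/locations/l/lakes/lake/contentitems/item",
        permissions=["dataplex.content.get", "dataplex.content.delete"],
    )
)
# Only the permissions the caller actually holds come back; because the
# call can "fail open", never use this for authorization decisions.
show_delete_button = "dataplex.content.delete" in response.permissions
```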
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ContentServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py deleted file mode 100644 index 10defeeed3a7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,786 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import ContentServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - 
request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport): - """gRPC AsyncIO backend transport for ContentService. - - ContentService manages Notebook and SQL Scripts for Dataplex - Universal Catalog. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the create content method over gRPC. - - Create a content. 
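[Editor's note] The `inspect.signature` check at the end of the async `__init__` above is a feature-detection idiom: rather than pin a `google-api-core` version, the transport asks whether the installed `wrap_method` accepts a `kind` argument and branches on the answer. A minimal standalone sketch of the same pattern:

```python
import inspect


def wrap_method(func, default_timeout=None):
    """Stand-in for an older helper that predates the `kind` parameter."""
    return func


_wrap_with_kind = "kind" in inspect.signature(wrap_method).parameters


def _wrap(func, **kwargs):
    # Drop the argument the installed helper cannot accept, keep the rest.
    if not _wrap_with_kind:
        kwargs.pop("kind", None)
    return wrap_method(func, **kwargs)


noop = _wrap(lambda: None, kind="rpc", default_timeout=60.0)
```

Probing the callable directly keeps the transport compatible with both old and new `google-api-core` releases without any version-string parsing.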
- - Returns: - Callable[[~.CreateContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_content' not in self._stubs: - self._stubs['create_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/CreateContent', - request_serializer=gcd_content.CreateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['create_content'] - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the update content method over gRPC. - - Update a content. Only supports full resource update. - - Returns: - Callable[[~.UpdateContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_content' not in self._stubs: - self._stubs['update_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/UpdateContent', - request_serializer=gcd_content.UpdateContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['update_content'] - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete content method over gRPC. - - Delete a content. - - Returns: - Callable[[~.DeleteContentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_content' not in self._stubs: - self._stubs['delete_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/DeleteContent', - request_serializer=content.DeleteContentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_content'] - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - Awaitable[analyze.Content]]: - r"""Return a callable for the get content method over gRPC. - - Get a content resource. - - Returns: - Callable[[~.GetContentRequest], - Awaitable[~.Content]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_content' not in self._stubs: - self._stubs['get_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetContent', - request_serializer=content.GetContentRequest.serialize, - response_deserializer=analyze.Content.deserialize, - ) - return self._stubs['get_content'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. 
- - Gets the access control policy for a contentitem resource. A - ``NOT_FOUND`` error is returned if the resource does not exist. - An empty policy is returned if the resource exists but does not - have a policy set on it. - - Caller must have Google IAM ``dataplex.content.getIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified contentitem - resource. Replaces any existing policy. - - Caller must have Google IAM ``dataplex.content.setIamPolicy`` - permission on the resource. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (a - ``NOT_FOUND`` error is not returned). - - A caller is not required to have Google IAM permission to make - this request. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
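A minimal usage sketch for the three IAM stubs above, going through the public async client rather than the transport directly; it assumes application default credentials, and the resource path is a placeholder:

    import asyncio

    from google.cloud import dataplex_v1
    from google.iam.v1 import iam_policy_pb2

    async def check_content_acl() -> None:
        client = dataplex_v1.ContentServiceAsyncClient()
        # Placeholder resource path for a content item; substitute a real one.
        resource = "projects/my-proj/locations/us-central1/lakes/my-lake/contentitems/my-content"
        policy = await client.get_iam_policy(
            request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        print(policy.bindings)
        perms = await client.test_iam_permissions(
            request=iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=["dataplex.content.getIamPolicy"],
            )
        )
        print(perms.permissions)

    asyncio.run(check_content_acl())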
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - Awaitable[content.ListContentResponse]]: - r"""Return a callable for the list content method over gRPC. - - List content. - - Returns: - Callable[[~.ListContentRequest], - Awaitable[~.ListContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_content' not in self._stubs: - self._stubs['list_content'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.ContentService/ListContent', - request_serializer=content.ListContentRequest.serialize, - response_deserializer=content.ListContentResponse.deserialize, - ) - return self._stubs['list_content'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_content: self._wrap_method( - self.create_content, - default_timeout=60.0, - client_info=client_info, - ), - self.update_content: self._wrap_method( - self.update_content, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_content: self._wrap_method( - self.delete_content, - default_timeout=60.0, - client_info=client_info, - ), - self.get_content: self._wrap_method( - self.get_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_content: self._wrap_method( - self.list_content, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - 
), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
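Each of these operation properties repeats the same memoized-stub pattern; a distilled, self-contained sketch of that pattern (hypothetical names, standard grpc channel API):

    from typing import Callable, Dict

    class _StubCache:
        """Illustrative only: cache one gRPC stub per RPC name, as the
        transport properties here do with self._stubs."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        def get(self, rpc_name: str, path: str, serializer, deserializer) -> Callable:
            # Create the unary-unary stub on first use, then reuse it.
            if rpc_name not in self._stubs:
                self._stubs[rpc_name] = self._channel.unary_unary(
                    path,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[rpc_name]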
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'ContentServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py deleted file mode 100644 index 3abbe49f6b41..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest.py +++ /dev/null @@ -1,2446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseContentServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class ContentServiceRestInterceptor: - """Interceptor for ContentService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ContentServiceRestTransport. - - .. 
code-block:: python - class MyCustomContentServiceInterceptor(ContentServiceRestInterceptor): - def pre_create_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_content(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_content(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_content(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ContentServiceRestTransport(interceptor=MyCustomContentServiceInterceptor()) - client = ContentServiceClient(transport=transport) - - - """ - def pre_create_content(self, request: gcd_content.CreateContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_content.CreateContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_create_content(self, response: analyze.Content) -> analyze.Content: - """Post-rpc interceptor for create_content - - DEPRECATED. Please use the `post_create_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_create_content` interceptor runs - before the `post_create_content_with_metadata` interceptor. - """ - return response - - def post_create_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_create_content_with_metadata` - interceptor in new development instead of the `post_create_content` interceptor. 
- When both interceptors are used, this `post_create_content_with_metadata` interceptor runs after the - `post_create_content` interceptor. The (possibly modified) response returned by - `post_create_content` will be passed to - `post_create_content_with_metadata`. - """ - return response, metadata - - def pre_delete_content(self, request: content.DeleteContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.DeleteContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def pre_get_content(self, request: content.GetContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.GetContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_get_content(self, response: analyze.Content) -> analyze.Content: - """Post-rpc interceptor for get_content - - DEPRECATED. Please use the `post_get_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_get_content` interceptor runs - before the `post_get_content_with_metadata` interceptor. - """ - return response - - def post_get_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_get_content_with_metadata` - interceptor in new development instead of the `post_get_content` interceptor. - When both interceptors are used, this `post_get_content_with_metadata` interceptor runs after the - `post_get_content` interceptor. The (possibly modified) response returned by - `post_get_content` will be passed to - `post_get_content_with_metadata`. - """ - return response, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. 
- """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_list_content(self, request: content.ListContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.ListContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_list_content(self, response: content.ListContentResponse) -> content.ListContentResponse: - """Post-rpc interceptor for list_content - - DEPRECATED. Please use the `post_list_content_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_list_content` interceptor runs - before the `post_list_content_with_metadata` interceptor. - """ - return response - - def post_list_content_with_metadata(self, response: content.ListContentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[content.ListContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_list_content_with_metadata` - interceptor in new development instead of the `post_list_content` interceptor. - When both interceptors are used, this `post_list_content_with_metadata` interceptor runs after the - `post_list_content` interceptor. The (possibly modified) response returned by - `post_list_content` will be passed to - `post_list_content_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. 
- """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_content(self, request: gcd_content.UpdateContentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_content.UpdateContentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_content - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_update_content(self, response: analyze.Content) -> analyze.Content: - """Post-rpc interceptor for update_content - - DEPRECATED. Please use the `post_update_content_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. This `post_update_content` interceptor runs - before the `post_update_content_with_metadata` interceptor. - """ - return response - - def post_update_content_with_metadata(self, response: analyze.Content, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Content, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_content - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ContentService server but before it is returned to user code. - - We recommend only using this `post_update_content_with_metadata` - interceptor in new development instead of the `post_update_content` interceptor. - When both interceptors are used, this `post_update_content_with_metadata` interceptor runs after the - `post_update_content` interceptor. The (possibly modified) response returned by - `post_update_content` will be passed to - `post_update_content_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContentService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the ContentService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ContentServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ContentServiceRestInterceptor - - -class ContentServiceRestTransport(_BaseContentServiceRestTransport): - """REST backend synchronous transport for ContentService. - - ContentService manages Notebook and SQL Scripts for Dataplex - Universal Catalog. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ContentServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ContentServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateContent(_BaseContentServiceRestTransport._BaseCreateContent, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.CreateContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcd_content.CreateContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> analyze.Content: - r"""Call the create content method over HTTP. - - Args: - request (~.gcd_content.CreateContentRequest): - The request object. Create content request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.analyze.Content: - Content represents a user-visible - notebook or a sql script - - """ - - http_options = _BaseContentServiceRestTransport._BaseCreateContent._get_http_options() - - request, metadata = self._interceptor.pre_create_content(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseCreateContent._get_transcoded_request(http_options, request) - - body = _BaseContentServiceRestTransport._BaseCreateContent._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseCreateContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.CreateContent", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "CreateContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._CreateContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
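The status check that follows converts HTTP errors with google-api-core's standard mapper; a sketch of what that means at the call site (real api_core exception types, placeholder resource name):

    from google.api_core import exceptions as core_exceptions
    from google.cloud import dataplex_v1

    client = dataplex_v1.ContentServiceClient(transport="rest")
    try:
        client.get_content(
            name="projects/my-proj/locations/us-central1/lakes/my-lake/contentitems/missing"
        )
    except core_exceptions.NotFound as exc:
        # HTTP 404 from the server surfaces as google.api_core.exceptions.NotFound.
        print(f"not found: {exc.message}")
    except core_exceptions.PermissionDenied:
        # HTTP 403 maps to PermissionDenied; re-raise for the caller.
        raise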
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = analyze.Content() - pb_resp = analyze.Content.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = analyze.Content.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.create_content", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "CreateContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteContent(_BaseContentServiceRestTransport._BaseDeleteContent, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.DeleteContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: content.DeleteContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete content method over HTTP. - - Args: - request (~.content.DeleteContentRequest): - The request object. Delete content request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseDeleteContent._get_http_options() - - request, metadata = self._interceptor.pre_delete_content(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseDeleteContent._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseDeleteContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.DeleteContent", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "DeleteContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._DeleteContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetContent(_BaseContentServiceRestTransport._BaseGetContent, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.GetContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: content.GetContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> analyze.Content: - r"""Call the get content method over HTTP. - - Args: - request (~.content.GetContentRequest): - The request object. Get content request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.analyze.Content: - Content represents a user-visible - notebook or a sql script - - """ - - http_options = _BaseContentServiceRestTransport._BaseGetContent._get_http_options() - - request, metadata = self._interceptor.pre_get_content(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseGetContent._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseGetContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetContent", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._GetContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = analyze.Content() - pb_resp = analyze.Content.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = analyze.Content.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.get_content", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIamPolicy(_BaseContentServiceRestTransport._BaseGetIamPolicy, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy 
method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. 
- - """ - - http_options = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListContent(_BaseContentServiceRestTransport._BaseListContent, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.ListContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: content.ListContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> content.ListContentResponse: - r"""Call the list content method over HTTP. - - Args: - request (~.content.ListContentRequest): - The request object. 
List content request. Returns the - BASIC Content view. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.content.ListContentResponse: - List content response. - """ - - http_options = _BaseContentServiceRestTransport._BaseListContent._get_http_options() - - request, metadata = self._interceptor.pre_list_content(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseListContent._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseListContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListContent", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._ListContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
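The ListContentResponse parsed below carries a next_page_token; the public client wraps it in a pager, so exhaustive listing is a simple loop (a sketch, assuming a real lake path and default credentials):

    from google.cloud import dataplex_v1

    client = dataplex_v1.ContentServiceClient()
    # The pager issues follow-up ListContent calls with next_page_token
    # under the hood until the listing is exhausted.
    for item in client.list_content(
        parent="projects/my-proj/locations/us-central1/lakes/my-lake"
    ):
        print(item.name)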
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = content.ListContentResponse() - pb_resp = content.ListContentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = content.ListContentResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.list_content", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseContentServiceRestTransport._BaseSetIamPolicy, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support -                    conditions in their IAM policies, see the `IAM -                    documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - -                **JSON example:** - -                :: - -                       { -                         "bindings": [ -                           { -                             "role": "roles/resourcemanager.organizationAdmin", -                             "members": [ -                               "user:mike@example.com", -                               "group:admins@example.com", -                               "domain:google.com", -                               "serviceAccount:my-project-id@appspot.gserviceaccount.com" -                             ] -                           }, -                           { -                             "role": "roles/resourcemanager.organizationViewer", -                             "members": [ -                               "user:eve@example.com" -                             ], -                             "condition": { -                               "title": "expirable access", -                               "description": "Does not grant access after Sep 2020", -                               "expression": "request.time < -                               timestamp('2020-10-01T00:00:00.000Z')", -                             } -                           } -                         ], -                         "etag": "BwWWja0YfJA=", -                         "version": 3 -                       } - -                **YAML example:** - -                :: - -                       bindings: -                       - members: -                         - user:mike@example.com -                         - group:admins@example.com -                         - domain:google.com -                         - serviceAccount:my-project-id@appspot.gserviceaccount.com -                         role: roles/resourcemanager.organizationAdmin -                       - members: -                         - user:eve@example.com -                         role: roles/resourcemanager.organizationViewer -                         condition: -                           title: expirable access -                           description: Does not grant access after Sep 2020 -                           expression: request.time < timestamp('2020-10-01T00:00:00.000Z') -                       etag: BwWWja0YfJA= -                       version: 3 - -                For a description of IAM and its features, see the `IAM -                documentation <https://cloud.google.com/iam/docs/>`__. - -        """ - -            http_options = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_http_options() - -            request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) -            transcoded_request = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - -            body = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - -            # Jsonify the query params -            query_params = _BaseContentServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - -            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER -                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) -                method = transcoded_request['method'] -                try: -                    request_payload = json_format.MessageToJson(request) -                except: -                    request_payload = None -                http_request = { -                  "payload": request_payload, -                  "requestMethod": method, -                  "requestUrl": request_url, -                  "headers": dict(metadata), -                } -                _LOGGER.debug( -                    f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.SetIamPolicy", -                    extra = { -                        "serviceName": "google.cloud.dataplex.v1.ContentService", -                        "rpcName": "SetIamPolicy", -                        "httpRequest": http_request, -                        "metadata": http_request["headers"], -                    }, -                ) - -            # Send the request -            response = ContentServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - -            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception -            # subclass.
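For orientation, this is the kind of request that flows through the ``_SetIamPolicy`` stub above. The resource name and role are hypothetical, an actual call additionally requires valid credentials, and the binding mirrors the JSON/YAML policy shape documented in the docstring::

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    request = iam_policy_pb2.SetIamPolicyRequest(
        # Hypothetical Dataplex content resource.
        resource="projects/my-project/locations/us-central1/lakes/my-lake/content/my-script",
        policy=policy_pb2.Policy(
            version=3,
            bindings=[
                policy_pb2.Binding(
                    role="roles/dataplex.viewer",  # hypothetical role choice
                    members=["user:eve@example.com"],
                )
            ],
        ),
    )
    # client.set_iam_policy(request=request) would POST this body via the stub.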
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseContentServiceRestTransport._BaseTestIamPermissions, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateContent(_BaseContentServiceRestTransport._BaseUpdateContent, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.UpdateContent") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcd_content.UpdateContentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> analyze.Content: - r"""Call the update content method over HTTP. - - Args: - request (~.gcd_content.UpdateContentRequest): - The request object. Update content request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.analyze.Content: - Content represents a user-visible - notebook or a sql script - - """ - - http_options = _BaseContentServiceRestTransport._BaseUpdateContent._get_http_options() - - request, metadata = self._interceptor.pre_update_content(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseUpdateContent._get_transcoded_request(http_options, request) - - body = _BaseContentServiceRestTransport._BaseUpdateContent._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseUpdateContent._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.UpdateContent", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "UpdateContent", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._UpdateContent._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
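As a usage sketch for the ``_UpdateContent`` stub above: the resource name is invented, and ``AnonymousCredentials`` only lets the client construct offline; issuing the PATCH for real requires valid credentials and an existing content item::

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.ContentServiceClient(
        credentials=AnonymousCredentials(), transport="rest"
    )
    request = dataplex_v1.UpdateContentRequest(
        content=dataplex_v1.Content(
            name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-script",
            data_text="SELECT 1",
        ),
        # Only the fields named in the mask are overwritten on the server.
        update_mask=field_mask_pb2.FieldMask(paths=["data_text"]),
    )
    # updated = client.update_content(request=request)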
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = analyze.Content() - pb_resp = analyze.Content.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_content(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_content_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = analyze.Content.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceClient.update_content", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "UpdateContent", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_content(self) -> Callable[ - [gcd_content.CreateContentRequest], - analyze.Content]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_content(self) -> Callable[ - [content.DeleteContentRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_content(self) -> Callable[ - [content.GetContentRequest], - analyze.Content]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_content(self) -> Callable[ - [content.ListContentRequest], - content.ListContentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_content(self) -> Callable[ - [gcd_content.UpdateContentRequest], - analyze.Content]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateContent(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseContentServiceRestTransport._BaseGetLocation, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseContentServiceRestTransport._BaseListLocations, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseContentServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
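All of the ``_get_response`` helpers above pass query parameters through ``rest_helpers.flatten_query_params``, which flattens a possibly nested dict into the key/value pairs that ``requests`` expects. A small sketch of its behavior (the parameter values are made up)::

    from google.api_core import rest_helpers

    params = rest_helpers.flatten_query_params(
        {"pageSize": 10, "filter": "state=ACTIVE", "labels": {"env": "dev"}},
        strict=True,  # stringify ints/bools, as the transport does
    )
    print(params)
    # e.g. [('pageSize', '10'), ('filter', 'state=ACTIVE'), ('labels.env', 'dev')]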
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseContentServiceRestTransport._BaseCancelOperation, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseContentServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseContentServiceRestTransport._BaseDeleteOperation, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseContentServiceRestTransport._BaseGetOperation, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseContentServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseContentServiceRestTransport._BaseListOperations, ContentServiceRestStub): - def __hash__(self): - return hash("ContentServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseContentServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseContentServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseContentServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.ContentServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ContentServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
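Each stub rebuilds its typed result by parsing the raw JSON body into a protobuf message; the message-based stubs pass ``ignore_unknown_fields=True`` so that fields added by a newer server do not break an older client. In isolation, for the ``ListOperations`` case::

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    payload = (
        '{"operations": [{"name": "operations/op-123", "done": true}],'
        ' "nextPageToken": "", "fieldFromNewerServer": 1}'
    )
    resp = operations_pb2.ListOperationsResponse()
    # The unknown "fieldFromNewerServer" is skipped instead of raising an error.
    json_format.Parse(payload, resp, ignore_unknown_fields=True)
    print(resp.operations[0].name, resp.operations[0].done)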
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.ContentServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.ContentService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ContentServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py deleted file mode 100644 index eab30664870c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py +++ /dev/null @@ -1,644 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseContentServiceRestTransport(ContentServiceTransport): - """Base REST backend transport for ContentService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- -    It sends JSON representations of protocol buffers over HTTP/1.1 -    """ - -    def __init__(self, *, -            host: str = 'dataplex.googleapis.com', -            credentials: Optional[Any] = None, -            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, -            always_use_jwt_access: Optional[bool] = False, -            url_scheme: str = 'https', -            api_audience: Optional[str] = None, -            ) -> None: -        """Instantiate the transport. -        Args: -            host (Optional[str]): -                 The hostname to connect to (default: 'dataplex.googleapis.com'). -            credentials (Optional[Any]): The -                authorization credentials to attach to requests. These -                credentials identify the application to the service; if none -                are specified, the client will attempt to ascertain the -                credentials from the environment. -            client_info (google.api_core.gapic_v1.client_info.ClientInfo): -                The client info used to send a user-agent string along with -                API requests. If ``None``, then default info will be used. -                Generally, you only need to set this if you are developing -                your own client library. -            always_use_jwt_access (Optional[bool]): Whether self signed JWT should -                be used for service account credentials. -            url_scheme: the protocol scheme for the API endpoint.  Normally -                "https", but for testing or local servers, -                "http" can be specified. -        """ -        # Run the base constructor -        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) -        if maybe_url_match is None: -            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER - -        url_match_items = maybe_url_match.groupdict() - -        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - -        super().__init__( -            host=host, -            credentials=credentials, -            client_info=client_info, -            always_use_jwt_access=always_use_jwt_access, -            api_audience=api_audience -        ) - -    class _BaseCreateContent: -        def __hash__(self):  # pragma: NO COVER -            return NotImplementedError("__hash__ must be implemented.") - -        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  { -        } - -        @classmethod -        def _get_unset_required_fields(cls, message_dict): -            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - -        @staticmethod -        def _get_http_options(): -            http_options: List[Dict[str, str]] = [{ -                'method': 'post', -                'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/contentitems', -                'body': 'content', -            }, -            { -                'method': 'post', -                'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/content', -                'body': 'content', -            }, -            ] -            return http_options - -        @staticmethod -        def _get_transcoded_request(http_options, request): -            pb_request = gcd_content.CreateContentRequest.pb(request) -            transcoded_request = path_template.transcode(http_options, pb_request) -            return transcoded_request - -        @staticmethod -        def _get_request_body_json(transcoded_request): -            # Jsonify the request body - -            body = json_format.MessageToJson( -                transcoded_request['body'], -                use_integers_for_enums=True -            ) -            return body -        @staticmethod -        def _get_query_params_json(transcoded_request): -            query_params = json.loads(json_format.MessageToJson( -                transcoded_request['query_params'], -                use_integers_for_enums=True, -            )) -            query_params.update(_BaseContentServiceRestTransport._BaseCreateContent._get_unset_required_fields(query_params)) - -            query_params["$alt"] = "json;enum-encoding=int" -            return query_params - -    class _BaseDeleteContent: -        def __hash__(self):  # pragma: NO COVER -            return NotImplementedError("__hash__ must be implemented.") - -        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  { -        } - -        @classmethod -        def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/content/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = content.DeleteContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseDeleteContent._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/content/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = content.GetContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseGetContent._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/content/**}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - 
query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/contentitems', - }, - { - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/content', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = content.ListContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseListContent._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/content/**}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': 
'/v1/{resource=projects/*/locations/*/lakes/*/content/**}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateContent: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{content.name=projects/*/locations/*/lakes/*/contentitems/**}', - 'body': 'content', - }, - { - 'method': 'patch', - 'uri': '/v1/{content.name=projects/*/locations/*/lakes/*/content/**}', - 'body': 'content', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcd_content.UpdateContentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseContentServiceRestTransport._BaseUpdateContent._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = 
json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseContentServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py deleted file mode 100644 index 0fa571256762..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataScanServiceClient -from .async_client import DataScanServiceAsyncClient - -__all__ = ( - 'DataScanServiceClient', - 'DataScanServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py deleted file mode 100644 index 9e4c31893087..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ /dev/null @@ -1,1755 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
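For orientation while reading the deleted `data_scan_service` files that follow: the removed `__init__.py` above re-exports both client surfaces, so downstream code imports them as in this minimal sketch (assumes an installed `google-cloud-dataplex` distribution; the staging copies deleted here mirror the published package layout):

.. code-block:: python

    # Minimal sketch; assumes the published google-cloud-dataplex package.
    from google.cloud.dataplex_v1.services.data_scan_service import (
        DataScanServiceAsyncClient,
        DataScanServiceClient,
    )
    # Both expose the same RPC surface; the async variant returns coroutines.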
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_scan_service import pagers -from google.cloud.dataplex_v1.types import data_discovery -from google.cloud.dataplex_v1.types import data_profile -from google.cloud.dataplex_v1.types import data_quality -from google.cloud.dataplex_v1.types import datascans -from google.cloud.dataplex_v1.types import processing -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport -from .client import DataScanServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataScanServiceAsyncClient: - """DataScanService manages DataScan resources which can be - configured to run various types of data scanning workload and - generate enriched metadata (e.g. Data Profile, Data Quality) for - the data source. - """ - - _client: DataScanServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
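A hedged illustration of the deprecation note above: GAPIC clients build the default endpoint from a template containing a `{UNIVERSE_DOMAIN}` placeholder, so the legacy `DEFAULT_ENDPOINT` value falls out of a simple `str.format` call. The template value below is assumed from the usual GAPIC convention, not quoted from this diff:

.. code-block:: python

    # Assumption: the generated template follows the standard GAPIC pattern.
    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    endpoint = _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE)
    assert endpoint == "dataplex.googleapis.com"  # the value DEFAULT_ENDPOINT used to hold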
- DEFAULT_ENDPOINT = DataScanServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE - - connection_path = staticmethod(DataScanServiceClient.connection_path) - parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) - data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) - parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) - data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) - parse_data_scan_job_path = staticmethod(DataScanServiceClient.parse_data_scan_job_path) - dataset_path = staticmethod(DataScanServiceClient.dataset_path) - parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) - entity_path = staticmethod(DataScanServiceClient.entity_path) - parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) - common_billing_account_path = staticmethod(DataScanServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataScanServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataScanServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataScanServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataScanServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataScanServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataScanServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataScanServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataScanServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataScanServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceAsyncClient: The constructed client. - """ - return DataScanServiceClient.from_service_account_info.__func__(DataScanServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataScanServiceAsyncClient: The constructed client. - """ - return DataScanServiceClient.from_service_account_file.__func__(DataScanServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataScanServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataScanServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataScanServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DataScanServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data scan service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataScanServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided.
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DataScanServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataScanServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "credentialsType": None, - } - ) - - async def create_data_scan(self, - request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_scan: Optional[datascans.DataScan] = None, - data_scan_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.CreateDataScanRequest( - parent="parent_value", - data_scan=data_scan, - data_scan_id="data_scan_id_value", - ) - - # Make the request - operation = client.create_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]]): - The request object. Create dataScan request. - parent (:class:`str`): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): - Required. DataScan resource. - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan_id (:class:`str`): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - - This corresponds to the ``data_scan_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data quality: generates queries based on the rules - and runs against the data to get data quality check - results. For more information, see [Auto data - quality - overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see - [About data - profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). - - Data discovery: scans data in Cloud Storage buckets - to extract and then catalog metadata. 
For more - information, see [Discover and catalog Cloud - Storage - data](https://cloud.google.com/bigquery/docs/automatic-discovery). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_scan, data_scan_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.CreateDataScanRequest): - request = datascans.CreateDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_scan is not None: - request.data_scan = data_scan - if data_scan_id is not None: - request.data_scan_id = data_scan_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_data_scan(self, - request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, - *, - data_scan: Optional[datascans.DataScan] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]]): - The request object. Update dataScan request. - data_scan (:class:`google.cloud.dataplex_v1.types.DataScan`): - Required. DataScan resource to be updated. - - Only fields specified in ``update_mask`` are updated. - - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data quality: generates queries based on the rules - and runs against the data to get data quality check - results. For more information, see [Auto data - quality - overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see - [About data - profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). - - Data discovery: scans data in Cloud Storage buckets - to extract and then catalog metadata. For more - information, see [Discover and catalog Cloud - Storage - data](https://cloud.google.com/bigquery/docs/automatic-discovery). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
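Because `update_data_scan` applies only the fields named in `update_mask`, callers typically pair the resource with an explicit mask; a minimal sketch (resource name and field value are hypothetical):

.. code-block:: python

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    async def update_description(client: dataplex_v1.DataScanServiceAsyncClient) -> None:
        data_scan = dataplex_v1.DataScan(
            name="projects/my-project/locations/us-central1/dataScans/my-scan",  # hypothetical
            description="Nightly data quality checks",
        )
        operation = await client.update_data_scan(
            data_scan=data_scan,
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),  # only this field changes
        )
        updated = await operation.result()  # block until the long-running operation finishes
        print(updated.name)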
- flattened_params = [data_scan, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.UpdateDataScanRequest): - request = datascans.UpdateDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_scan is not None: - request.data_scan = data_scan - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_scan.name", request.data_scan.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_scan(self, - request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]]): - The request object. Delete dataScan request. - name (:class:`str`): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.DeleteDataScanRequest): - request = datascans.DeleteDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_data_scan(self, - request: Optional[Union[datascans.GetDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.DataScan: - r"""Gets a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]]): - The request object. Get dataScan request. - name (:class:`str`): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataScan: - Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data quality: generates queries based on the rules - and runs against the data to get data quality check - results. For more information, see [Auto data - quality - overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see - [About data - profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). - - Data discovery: scans data in Cloud Storage buckets - to extract and then catalog metadata. For more - information, see [Discover and catalog Cloud - Storage - data](https://cloud.google.com/bigquery/docs/automatic-discovery). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanRequest): - request = datascans.GetDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
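The `metadata` block that follows comments like the one above injects the `x-goog-request-params` routing header; a minimal sketch of what that helper produces (value illustrative):

.. code-block:: python

    from google.api_core import gapic_v1

    pair = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/locations/us-central1/dataScans/my-scan"),)
    )
    # `pair` is a single ("x-goog-request-params", "name=<url-encoded value>")
    # tuple, appended to the outgoing request metadata.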
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_data_scans(self, - request: Optional[Union[datascans.ListDataScansRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataScansAsyncPager: - r"""Lists DataScans. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]]): - The request object. List dataScans request. - parent (:class:`str`): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager: - List dataScans response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
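The quick check above is why each call accepts either a full request object or the flattened fields, never both; a short usage sketch (resource name hypothetical):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def fetch_scan(client: dataplex_v1.DataScanServiceAsyncClient):
        scan_name = "projects/my-project/locations/us-central1/dataScans/my-scan"  # hypothetical

        # Either pass a request object...
        scan = await client.get_data_scan(request={"name": scan_name})
        # ...or the flattened field, but never both in one call:
        scan = await client.get_data_scan(name=scan_name)
        # Supplying `request` together with `name` raises ValueError.
        return scan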
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScansRequest): - request = datascans.ListDataScansRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scans] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataScansAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_data_scan(self, - request: Optional[Union[datascans.RunDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.RunDataScanResponse: - r"""Runs an on-demand execution of a DataScan - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.run_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]]): - The request object. Run DataScan Request - name (:class:`str`): - Required. The resource name of the DataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - Only **OnDemand** data scans are allowed. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.RunDataScanResponse: - Run DataScan Response. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.RunDataScanRequest): - request = datascans.RunDataScanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.run_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_scan_job(self, - request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.DataScanJob: - r"""Gets a DataScanJob resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]]): - The request object. Get DataScanJob request. - name (:class:`str`): - Required. The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataScanJob: - A DataScanJob represents an instance - of DataScan execution. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanJobRequest): - request = datascans.GetDataScanJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_scan_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_data_scan_jobs(self, - request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataScanJobsAsyncPager: - r"""Lists DataScanJobs under the given DataScan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]]): - The request object. List DataScanJobs request. - parent (:class:`str`): - Required. The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager: - List DataScanJobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScanJobsRequest): - request = datascans.ListDataScanJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_scan_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
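As the comment above notes, paged methods hand back an async pager; a minimal consumption sketch (parent resource hypothetical):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def list_jobs(client: dataplex_v1.DataScanServiceAsyncClient) -> None:
        pager = await client.list_data_scan_jobs(
            parent="projects/my-project/locations/us-central1/dataScans/my-scan"  # hypothetical
        )
        async for job in pager:  # additional pages are fetched transparently
            print(job.name)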
- response = pagers.ListDataScanJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def generate_data_quality_rules(self, - request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = await client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]]): - The request object. Request details for generating data - quality rule recommendations. - name (:class:`str`): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling job - (a data scan job where the job type is data profiling) - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Response details for data quality - rule recommendations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, datascans.GenerateDataQualityRulesRequest): - request = datascans.GenerateDataQualityRulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.generate_data_quality_rules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. 
- - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
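# A usage sketch for the operations/locations mixins defined here; unlike the
# service methods, these take raw protobuf requests. Names are placeholders.
import asyncio

from google.cloud import dataplex_v1
from google.cloud.location import locations_pb2
from google.longrunning import operations_pb2


async def use_mixins():
    client = dataplex_v1.DataScanServiceAsyncClient()
    location = await client.get_location(
        request=locations_pb2.GetLocationRequest(
            name="projects/my-project/locations/us-central1",
        )
    )
    print(location.display_name)
    # Best-effort only: the server may still complete the operation.
    await client.cancel_operation(
        request=operations_pb2.CancelOperationRequest(
            name="projects/my-project/locations/us-central1/operations/op-123",
        )
    )

asyncio.run(use_mixins())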
- return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataScanServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DataScanServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py deleted file mode 100644 index a37c5519e9b5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ /dev/null @@ -1,2154 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
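# Before the synchronous client below, a closing sketch for the async client
# above: __aenter__/__aexit__ make it usable as an async context manager, so
# the transport is closed deterministically. The scan name is a placeholder.
import asyncio

from google.cloud import dataplex_v1


async def scoped_client():
    async with dataplex_v1.DataScanServiceAsyncClient() as client:
        scan = await client.get_data_scan(
            name="projects/my-project/locations/us-central1/dataScans/my-scan",
        )
        print(scan.display_name)
    # The underlying channel is closed once the block exits.

asyncio.run(scoped_client())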
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_scan_service import pagers -from google.cloud.dataplex_v1.types import data_discovery -from google.cloud.dataplex_v1.types import data_profile -from google.cloud.dataplex_v1.types import data_quality -from google.cloud.dataplex_v1.types import datascans -from google.cloud.dataplex_v1.types import processing -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataScanServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataScanServiceGrpcTransport -from .transports.grpc_asyncio import DataScanServiceGrpcAsyncIOTransport -from .transports.rest import DataScanServiceRestTransport - - -class DataScanServiceClientMeta(type): - """Metaclass for the DataScanService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] - _transport_registry["grpc"] = DataScanServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataScanServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DataScanServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataScanServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
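# A sketch of how the transport registry above is consulted: an explicit label
# selects "grpc", "grpc_asyncio", or "rest", while omitting it falls back to
# the first registered entry (gRPC).
from google.cloud import dataplex_v1

rest_transport = dataplex_v1.DataScanServiceClient.get_transport_class("rest")
default_transport = dataplex_v1.DataScanServiceClient.get_transport_class()
print(rest_transport.__name__, default_transport.__name__)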
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class DataScanServiceClient(metaclass=DataScanServiceClientMeta):
-    """DataScanService manages DataScan resources which can be
-    configured to run various types of data scanning workload and
-    generate enriched metadata (e.g. Data Profile, Data Quality) for
-    the data source.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "dataplex.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            DataScanServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            DataScanServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> DataScanServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            DataScanServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def connection_path(project: str,location: str,connection: str,) -> str: - """Returns a fully-qualified connection string.""" - return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - - @staticmethod - def parse_connection_path(path: str) -> Dict[str,str]: - """Parses a connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_scan_path(project: str,location: str,dataScan: str,) -> str: - """Returns a fully-qualified data_scan string.""" - return "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, ) - - @staticmethod - def parse_data_scan_path(path: str) -> Dict[str,str]: - """Parses a data_scan path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_scan_job_path(project: str,location: str,dataScan: str,job: str,) -> str: - """Returns a fully-qualified data_scan_job string.""" - return "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) - - @staticmethod - def parse_data_scan_job_path(path: str) -> Dict[str,str]: - """Parses a data_scan_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataScans/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dataset_path(project: str,dataset: str,) -> str: - """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - - @staticmethod - def parse_dataset_path(path: str) -> Dict[str,str]: - """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str: - """Returns a fully-qualified entity string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - - @staticmethod - def parse_entity_path(path: str) -> Dict[str,str]: - """Parses a entity path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - 
return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. 
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DataScanServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
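# A sketch of the endpoint and universe helpers above. These are private
# implementation details; they are exercised here only to illustrate the
# documented precedence rules.
from google.cloud import dataplex_v1

klass = dataplex_v1.DataScanServiceClient

# *.googleapis.com endpoints gain an .mtls. component; others pass through.
assert klass._get_default_mtls_endpoint("dataplex.googleapis.com") == \
    "dataplex.mtls.googleapis.com"

# client_options.universe_domain beats GOOGLE_CLOUD_UNIVERSE_DOMAIN, which
# beats the "googleapis.com" default.
assert klass._get_universe_domain(None, None) == "googleapis.com"
assert klass._get_universe_domain("opt.example.com", "env.example.com") == \
    "opt.example.com"

# With no override, no client cert, and "auto", the regular endpoint wins;
# a custom universe is substituted into the endpoint template.
assert klass._get_api_endpoint(None, None, "googleapis.com", "auto") == \
    "dataplex.googleapis.com"
assert klass._get_api_endpoint(None, None, "example.com", "never") == \
    "dataplex.example.com"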
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataScanServiceTransport, Callable[..., DataScanServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data scan service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataScanServiceTransport,Callable[..., DataScanServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataScanServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataScanServiceClient._read_environment_variables() - self._client_cert_source = DataScanServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataScanServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataScanServiceTransport) - if transport_provided: - # transport is a DataScanServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DataScanServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataScanServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataScanServiceTransport], Callable[..., DataScanServiceTransport]] = ( - DataScanServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataScanServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataScanServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "credentialsType": None, - } - ) - - def create_data_scan(self, - request: Optional[Union[datascans.CreateDataScanRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_scan: Optional[datascans.DataScan] = None, - data_scan_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.CreateDataScanRequest( - parent="parent_value", - data_scan=data_scan, - data_scan_id="data_scan_id_value", - ) - - # Make the request - operation = client.create_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataScanRequest, dict]): - The request object. Create dataScan request. - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource. - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_scan_id (str): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - - This corresponds to the ``data_scan_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data quality: generates queries based on the rules - and runs against the data to get data quality check - results. For more information, see [Auto data - quality - overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see - [About data - profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). - - Data discovery: scans data in Cloud Storage buckets - to extract and then catalog metadata. For more - information, see [Discover and catalog Cloud - Storage - data](https://cloud.google.com/bigquery/docs/automatic-discovery). - - """ - # Create or coerce a protobuf request object. 
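# A sketch of driving the long-running operation returned by create_data_scan,
# assuming a data profile scan; every resource name below is a placeholder.
from google.api_core import exceptions as core_exceptions
from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()

data_scan = dataplex_v1.DataScan()
data_scan.data.entity = (
    "projects/my-project/locations/us-central1/lakes/my-lake"
    "/zones/my-zone/entities/my-entity")
data_scan.data_profile_spec = dataplex_v1.DataProfileSpec()

op = client.create_data_scan(
    parent="projects/my-project/locations/us-central1",
    data_scan=data_scan,
    data_scan_id="my-profile-scan",  # lowercase, 1-63 chars, starts with a letter
)
try:
    # Bound the wait instead of blocking indefinitely.
    print("Created:", op.result(timeout=300).name)
except core_exceptions.GoogleAPICallError as exc:
    print("Creation failed:", exc)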
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_scan, data_scan_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.CreateDataScanRequest): - request = datascans.CreateDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_scan is not None: - request.data_scan = data_scan - if data_scan_id is not None: - request.data_scan_id = data_scan_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_scan(self, - request: Optional[Union[datascans.UpdateDataScanRequest, dict]] = None, - *, - data_scan: Optional[datascans.DataScan] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules.dimension = "dimension_value" - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataScanRequest, dict]): - The request object. Update dataScan request. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource to be updated. 
- - Only fields specified in ``update_mask`` are updated. - - This corresponds to the ``data_scan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataScan` Represents a user-visible job which provides the insights for the related - data source. - - For example: - - - Data quality: generates queries based on the rules - and runs against the data to get data quality check - results. For more information, see [Auto data - quality - overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview). - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see - [About data - profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview). - - Data discovery: scans data in Cloud Storage buckets - to extract and then catalog metadata. For more - information, see [Discover and catalog Cloud - Storage - data](https://cloud.google.com/bigquery/docs/automatic-discovery). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_scan, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.UpdateDataScanRequest): - request = datascans.UpdateDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_scan is not None: - request.data_scan = data_scan - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_scan.name", request.data_scan.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
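# A sketch of a masked update per the update_mask semantics documented above;
# fields not listed in the mask are left untouched. The name is a placeholder.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.DataScanServiceClient()

data_scan = dataplex_v1.DataScan()
data_scan.name = "projects/my-project/locations/us-central1/dataScans/my-scan"
data_scan.description = "Nightly profile scan"

op = client.update_data_scan(
    data_scan=data_scan,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
print(op.result().description)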
- response = operation.from_gapic( - response, - self._transport.operations_client, - datascans.DataScan, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_scan(self, - request: Optional[Union[datascans.DeleteDataScanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a DataScan resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataScanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataScanRequest, dict]): - The request object. Delete dataScan request. - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
-        if not isinstance(request, datascans.DeleteDataScanRequest):
-            request = datascans.DeleteDataScanRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_data_scan]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=service.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_data_scan(self,
-            request: Optional[Union[datascans.GetDataScanRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> datascans.DataScan:
-        r"""Gets a DataScan resource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_get_data_scan():
-                # Create a client
-                client = dataplex_v1.DataScanServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.GetDataScanRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_data_scan(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.GetDataScanRequest, dict]):
-                The request object. Get dataScan request.
-            name (str):
-                Required. The resource name of the dataScan:
-                ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``
-                where ``project`` refers to a *project_id* or
-                *project_number* and ``location_id`` refers to a Google
-                Cloud region.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.dataplex_v1.types.DataScan:
-                Represents a user-visible job which provides the insights for the related
-                data source.
-
-                For example:
-
-                -  Data quality: generates queries based on the rules
-                   and runs against the data to get data quality check
-                   results. For more information, see [Auto data
-                   quality
-                   overview](https://cloud.google.com/dataplex/docs/auto-data-quality-overview).
-                -  Data profile: analyzes the data in tables and
-                   generates insights about the structure, content and
-                   relationships (such as null percent, cardinality,
-                   min/max/mean, etc). For more information, see
-                   [About data
-                   profiling](https://cloud.google.com/dataplex/docs/data-profiling-overview).
-                -  Data discovery: scans data in Cloud Storage buckets
-                   to extract and then catalog metadata. For more
-                   information, see [Discover and catalog Cloud
-                   Storage
-                   data](https://cloud.google.com/bigquery/docs/automatic-discovery).
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, datascans.GetDataScanRequest):
-            request = datascans.GetDataScanRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_data_scan]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_data_scans(self,
-            request: Optional[Union[datascans.ListDataScansRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListDataScansPager:
-        r"""Lists DataScans.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataplex_v1
-
-            def sample_list_data_scans():
-                # Create a client
-                client = dataplex_v1.DataScanServiceClient()
-
-                # Initialize request argument(s)
-                request = dataplex_v1.ListDataScansRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_data_scans(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.dataplex_v1.types.ListDataScansRequest, dict]):
-                The request object. List dataScans request.
-            parent (str):
-                Required. The resource name of the parent location:
-                ``projects/{project}/locations/{location_id}`` where
-                ``project`` refers to a *project_id* or *project_number*
-                and ``location_id`` refers to a Google Cloud region.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager:
-                List dataScans response.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, datascans.ListDataScansRequest):
-            request = datascans.ListDataScansRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_data_scans]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListDataScansPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def run_data_scan(self,
-            request: Optional[Union[datascans.RunDataScanRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> datascans.RunDataScanResponse:
-        r"""Runs an on-demand execution of a DataScan
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.run_data_scan(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.RunDataScanRequest, dict]): - The request object. Run DataScan Request - name (str): - Required. The resource name of the DataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - Only **OnDemand** data scans are allowed. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.RunDataScanResponse: - Run DataScan Response. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.RunDataScanRequest): - request = datascans.RunDataScanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_data_scan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_scan_job(self, - request: Optional[Union[datascans.GetDataScanJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.DataScanJob: - r"""Gets a DataScanJob resource. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataScanJobRequest, dict]): - The request object. Get DataScanJob request. - name (str): - Required. The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataScanJob: - A DataScanJob represents an instance - of DataScan execution. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GetDataScanJobRequest): - request = datascans.GetDataScanJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_scan_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_data_scan_jobs(self, - request: Optional[Union[datascans.ListDataScanJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataScanJobsPager: - r"""Lists DataScanJobs under the given DataScan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataScanJobsRequest, dict]): - The request object. List DataScanJobs request. - parent (str): - Required. The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager: - List DataScanJobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.ListDataScanJobsRequest): - request = datascans.ListDataScanJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
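The wrapped method applies the transport's default retry and timeout policies; a hedged sketch (values purely illustrative) of overriding both for a single call:

.. code-block:: python

    from google.api_core import retry as retries

    pager = client.list_data_scan_jobs(
        parent="projects/my-project/locations/us-central1/dataScans/my-scan",
        retry=retries.Retry(initial=0.1, maximum=10.0, timeout=60.0),
        timeout=30.0,
    )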
- rpc = self._transport._wrapped_methods[self._transport.list_data_scan_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataScanJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def generate_data_quality_rules(self, - request: Optional[Union[datascans.GenerateDataQualityRulesRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datascans.GenerateDataQualityRulesResponse: - r"""Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_generate_data_quality_rules(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GenerateDataQualityRulesRequest( - name="name_value", - ) - - # Make the request - response = client.generate_data_quality_rules(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest, dict]): - The request object. Request details for generating data - quality rule recommendations. - name (str): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling job - (a data scan job where the job type is data profiling) - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse: - Response details for data quality - rule recommendations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
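Per the docstring above, ``name`` may identify either a data scan or one of its completed profiling jobs; a hedged sketch (IDs hypothetical) of both accepted forms:

.. code-block:: python

    scan = "projects/my-project/locations/us-central1/dataScans/my-profile-scan"

    # From the scan's latest successful profiling job.
    response = client.generate_data_quality_rules(name=scan)

    # From one specific, completed profiling job.
    response = client.generate_data_quality_rules(name=f"{scan}/jobs/my-job-id")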
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datascans.GenerateDataQualityRulesRequest): - request = datascans.GenerateDataQualityRulesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.generate_data_quality_rules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataScanServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. 
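Because plain dicts were coerced into ``operations_pb2.ListOperationsRequest`` above, a hedged usage sketch (the location path is illustrative) is simply:

.. code-block:: python

    response = client.list_operations(
        {"name": "projects/my-project/locations/us-central1"})
    for op in response.operations:
        print(op.name, op.done)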
- response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
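Deletion only discards the server-side record and never cancels; a hedged sketch of stopping a running operation before removing it (the operation name is hypothetical):

.. code-block:: python

    op_name = "projects/my-project/locations/us-central1/operations/operation-123"
    client.cancel_operation({"name": op_name})   # best-effort cancel
    client.delete_operation({"name": op_name})   # drop the record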
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
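A hedged sketch of consuming the returned ``ListLocationsResponse`` (the project ID is illustrative):

.. code-block:: python

    response = client.list_locations({"name": "projects/my-project"})
    for location in response.locations:
        print(location.location_id)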
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DataScanServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py deleted file mode 100644 index 9e99ef310bf3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/pagers.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import datascans - - -class ListDataScansPager: - """A pager for iterating through ``list_data_scans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_scans`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataScans`` requests and continue to iterate - through the ``data_scans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datascans.ListDataScansResponse], - request: datascans.ListDataScansRequest, - response: datascans.ListDataScansResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScansRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScansResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = datascans.ListDataScansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datascans.ListDataScansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datascans.DataScan]: - for page in self.pages: - yield from page.data_scans - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScansAsyncPager: - """A pager for iterating through ``list_data_scans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_scans`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataScans`` requests and continue to iterate - through the ``data_scans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datascans.ListDataScansResponse]], - request: datascans.ListDataScansRequest, - response: datascans.ListDataScansResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScansRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScansResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
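A hedged usage sketch (an async client and a hypothetical parent are assumed):

.. code-block:: python

    pager = await async_client.list_data_scans(
        parent="projects/my-project/locations/us-central1")
    async for data_scan in pager:
        print(data_scan.name)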
- """ - self._method = method - self._request = datascans.ListDataScansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datascans.ListDataScansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datascans.DataScan]: - async def async_generator(): - async for page in self.pages: - for response in page.data_scans: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScanJobsPager: - """A pager for iterating through ``list_data_scan_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_scan_jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataScanJobs`` requests and continue to iterate - through the ``data_scan_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datascans.ListDataScanJobsResponse], - request: datascans.ListDataScanJobsRequest, - response: datascans.ListDataScanJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datascans.ListDataScanJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datascans.ListDataScanJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datascans.DataScanJob]: - for page in self.pages: - yield from page.data_scan_jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataScanJobsAsyncPager: - """A pager for iterating through ``list_data_scan_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_scan_jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataScanJobs`` requests and continue to iterate - through the ``data_scan_jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataScanJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datascans.ListDataScanJobsResponse]], - request: datascans.ListDataScanJobsRequest, - response: datascans.ListDataScanJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataScanJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataScanJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datascans.ListDataScanJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datascans.ListDataScanJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datascans.DataScanJob]: - async def async_generator(): - async for page in self.pages: - for response in page.data_scan_jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst deleted file mode 100644 index e27965be0a36..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataScanServiceTransport` is the ABC for all transports. -- public child `DataScanServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataScanServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataScanServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataScanServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py deleted file mode 100644 index 223ef82cfff0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataScanServiceTransport -from .grpc import DataScanServiceGrpcTransport -from .grpc_asyncio import DataScanServiceGrpcAsyncIOTransport -from .rest import DataScanServiceRestTransport -from .rest import DataScanServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DataScanServiceTransport]] -_transport_registry['grpc'] = DataScanServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataScanServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DataScanServiceRestTransport - -__all__ = ( - 'DataScanServiceTransport', - 'DataScanServiceGrpcTransport', - 'DataScanServiceGrpcAsyncIOTransport', - 'DataScanServiceRestTransport', - 'DataScanServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py deleted file mode 100644 index cafedfaf10bd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/base.py +++ /dev/null @@ -1,363 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import datascans -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataScanServiceTransport(abc.ABC): - """Abstract transport class for DataScanService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
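Each entry in the table below binds an RPC to its default retry/timeout policy; a hedged note with a sketch (``scan_name`` is hypothetical): explicit per-call arguments take precedence over these defaults.

.. code-block:: python

    client.get_data_scan(name=scan_name)                 # table defaults
    client.get_data_scan(name=scan_name, timeout=15.0)   # explicit override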
- self._wrapped_methods = { - self.create_data_scan: gapic_v1.method.wrap_method( - self.create_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.update_data_scan: gapic_v1.method.wrap_method( - self.update_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_scan: gapic_v1.method.wrap_method( - self.delete_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan: gapic_v1.method.wrap_method( - self.get_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scans: gapic_v1.method.wrap_method( - self.list_data_scans, - default_timeout=None, - client_info=client_info, - ), - self.run_data_scan: gapic_v1.method.wrap_method( - self.run_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan_job: gapic_v1.method.wrap_method( - self.get_data_scan_job, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scan_jobs: gapic_v1.method.wrap_method( - self.list_data_scan_jobs, - default_timeout=None, - client_info=client_info, - ), - self.generate_data_quality_rules: gapic_v1.method.wrap_method( - self.generate_data_quality_rules, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
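A hedged sketch: the client's context manager calls this automatically on exit, which is the safest way to release the transport:

.. code-block:: python

    with dataplex_v1.DataScanServiceClient() as client:
        client.get_data_scan(name=scan_name)  # scan_name is hypothetical
    # The transport is closed here.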
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - Union[ - datascans.DataScan, - Awaitable[datascans.DataScan] - ]]: - raise NotImplementedError() - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - Union[ - datascans.ListDataScansResponse, - Awaitable[datascans.ListDataScansResponse] - ]]: - raise NotImplementedError() - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - Union[ - datascans.RunDataScanResponse, - Awaitable[datascans.RunDataScanResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - Union[ - datascans.DataScanJob, - Awaitable[datascans.DataScanJob] - ]]: - raise NotImplementedError() - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - Union[ - datascans.ListDataScanJobsResponse, - Awaitable[datascans.ListDataScanJobsResponse] - ]]: - raise NotImplementedError() - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - Union[ - datascans.GenerateDataQualityRulesResponse, - Awaitable[datascans.GenerateDataQualityRulesResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataScanServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py 
b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py
deleted file mode 100644
index 3424a6c32dc9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc.py
+++ /dev/null
@@ -1,691 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled: # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = continuation(client_call_details, request)
-        if logging_enabled: # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of string values.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class DataScanServiceGrpcTransport(DataScanServiceTransport):
-    """gRPC backend transport for DataScanService.
-
-    DataScanService manages DataScan resources which can be
-    configured to run various types of data scanning workloads and
-    generate enriched metadata (e.g. Data Profile, Data Quality) for
-    the data source.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataplex.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'dataplex.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data scan method over gRPC. - - Creates a DataScan resource. - - Returns: - Callable[[~.CreateDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
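        # Illustrative sketch (editorial, not generated code): the stub cached
        # below is a plain gRPC callable that takes one CreateDataScanRequest
        # and returns a google.longrunning Operation; the resource names are
        # hypothetical:
        #
        #     request = datascans.CreateDataScanRequest(
        #         parent="projects/my-project/locations/us-central1")
        #     operation = transport.create_data_scan(request)
        #     status = transport.operations_client.get_operation(operation.name)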
- if 'create_data_scan' not in self._stubs: - self._stubs['create_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', - request_serializer=datascans.CreateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_scan'] - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data scan method over gRPC. - - Updates a DataScan resource. - - Returns: - Callable[[~.UpdateDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_scan' not in self._stubs: - self._stubs['update_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', - request_serializer=datascans.UpdateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_scan'] - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data scan method over gRPC. - - Deletes a DataScan resource. - - Returns: - Callable[[~.DeleteDataScanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_scan' not in self._stubs: - self._stubs['delete_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', - request_serializer=datascans.DeleteDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_scan'] - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - datascans.DataScan]: - r"""Return a callable for the get data scan method over gRPC. - - Gets a DataScan resource. - - Returns: - Callable[[~.GetDataScanRequest], - ~.DataScan]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan' not in self._stubs: - self._stubs['get_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScan', - request_serializer=datascans.GetDataScanRequest.serialize, - response_deserializer=datascans.DataScan.deserialize, - ) - return self._stubs['get_data_scan'] - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - datascans.ListDataScansResponse]: - r"""Return a callable for the list data scans method over gRPC. - - Lists DataScans. - - Returns: - Callable[[~.ListDataScansRequest], - ~.ListDataScansResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scans' not in self._stubs: - self._stubs['list_data_scans'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScans', - request_serializer=datascans.ListDataScansRequest.serialize, - response_deserializer=datascans.ListDataScansResponse.deserialize, - ) - return self._stubs['list_data_scans'] - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - datascans.RunDataScanResponse]: - r"""Return a callable for the run data scan method over gRPC. - - Runs an on-demand execution of a DataScan - - Returns: - Callable[[~.RunDataScanRequest], - ~.RunDataScanResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_data_scan' not in self._stubs: - self._stubs['run_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/RunDataScan', - request_serializer=datascans.RunDataScanRequest.serialize, - response_deserializer=datascans.RunDataScanResponse.deserialize, - ) - return self._stubs['run_data_scan'] - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - datascans.DataScanJob]: - r"""Return a callable for the get data scan job method over gRPC. - - Gets a DataScanJob resource. - - Returns: - Callable[[~.GetDataScanJobRequest], - ~.DataScanJob]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan_job' not in self._stubs: - self._stubs['get_data_scan_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', - request_serializer=datascans.GetDataScanJobRequest.serialize, - response_deserializer=datascans.DataScanJob.deserialize, - ) - return self._stubs['get_data_scan_job'] - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - datascans.ListDataScanJobsResponse]: - r"""Return a callable for the list data scan jobs method over gRPC. - - Lists DataScanJobs under the given DataScan. - - Returns: - Callable[[~.ListDataScanJobsRequest], - ~.ListDataScanJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scan_jobs' not in self._stubs: - self._stubs['list_data_scan_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', - request_serializer=datascans.ListDataScanJobsRequest.serialize, - response_deserializer=datascans.ListDataScanJobsResponse.deserialize, - ) - return self._stubs['list_data_scan_jobs'] - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - datascans.GenerateDataQualityRulesResponse]: - r"""Return a callable for the generate data quality rules method over gRPC. 
- - Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - Returns: - Callable[[~.GenerateDataQualityRulesRequest], - ~.GenerateDataQualityRulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_data_quality_rules' not in self._stubs: - self._stubs['generate_data_quality_rules'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', - request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, - response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, - ) - return self._stubs['generate_data_quality_rules'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
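        # Illustrative sketch (editorial, not generated code): paging through
        # long-running operations with the raw stub; the resource name and
        # page size are hypothetical:
        #
        #     request = operations_pb2.ListOperationsRequest(
        #         name="projects/my-project/locations/us-central1", page_size=100)
        #     response = transport.list_operations(request)
        #     for operation in response.operations:
        #         print(operation.name, operation.done)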
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataScanServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py deleted file mode 100644 index 6a450d6f8511..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,782 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import DataScanServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled: # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled: # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of string values.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.dataplex.v1.DataScanService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
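# Illustrative sketch (editorial, not generated code): an interceptor like the
# one above can also be wired in through the public grpc.aio API at
# channel-creation time; the generated transport instead appends it to the
# channel's private interceptor list. The target address is a hypothetical
# placeholder, and no RPC is sent by merely creating the channel.
def _example_intercepted_channel() -> aio.Channel:
    return aio.insecure_channel(
        "localhost:50051",  # hypothetical target
        interceptors=[_LoggingClientAIOInterceptor()],
    )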
-class DataScanServiceGrpcAsyncIOTransport(DataScanServiceTransport): - """gRPC AsyncIO backend transport for DataScanService. - - DataScanService manages DataScan resources which can be - configured to run various types of data scanning workload and - generate enriched metadata (e.g. Data Profile, Data Quality) for - the data source. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. 
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data scan method over gRPC. - - Creates a DataScan resource. - - Returns: - Callable[[~.CreateDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_data_scan' not in self._stubs: - self._stubs['create_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/CreateDataScan', - request_serializer=datascans.CreateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_scan'] - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data scan method over gRPC. - - Updates a DataScan resource. - - Returns: - Callable[[~.UpdateDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_scan' not in self._stubs: - self._stubs['update_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/UpdateDataScan', - request_serializer=datascans.UpdateDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_scan'] - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data scan method over gRPC. - - Deletes a DataScan resource. - - Returns: - Callable[[~.DeleteDataScanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_scan' not in self._stubs: - self._stubs['delete_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/DeleteDataScan', - request_serializer=datascans.DeleteDataScanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_scan'] - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - Awaitable[datascans.DataScan]]: - r"""Return a callable for the get data scan method over gRPC. - - Gets a DataScan resource. - - Returns: - Callable[[~.GetDataScanRequest], - Awaitable[~.DataScan]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan' not in self._stubs: - self._stubs['get_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScan', - request_serializer=datascans.GetDataScanRequest.serialize, - response_deserializer=datascans.DataScan.deserialize, - ) - return self._stubs['get_data_scan'] - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - Awaitable[datascans.ListDataScansResponse]]: - r"""Return a callable for the list data scans method over gRPC. - - Lists DataScans. - - Returns: - Callable[[~.ListDataScansRequest], - Awaitable[~.ListDataScansResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_scans' not in self._stubs: - self._stubs['list_data_scans'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScans', - request_serializer=datascans.ListDataScansRequest.serialize, - response_deserializer=datascans.ListDataScansResponse.deserialize, - ) - return self._stubs['list_data_scans'] - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - Awaitable[datascans.RunDataScanResponse]]: - r"""Return a callable for the run data scan method over gRPC. - - Runs an on-demand execution of a DataScan - - Returns: - Callable[[~.RunDataScanRequest], - Awaitable[~.RunDataScanResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_data_scan' not in self._stubs: - self._stubs['run_data_scan'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/RunDataScan', - request_serializer=datascans.RunDataScanRequest.serialize, - response_deserializer=datascans.RunDataScanResponse.deserialize, - ) - return self._stubs['run_data_scan'] - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - Awaitable[datascans.DataScanJob]]: - r"""Return a callable for the get data scan job method over gRPC. - - Gets a DataScanJob resource. - - Returns: - Callable[[~.GetDataScanJobRequest], - Awaitable[~.DataScanJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_scan_job' not in self._stubs: - self._stubs['get_data_scan_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GetDataScanJob', - request_serializer=datascans.GetDataScanJobRequest.serialize, - response_deserializer=datascans.DataScanJob.deserialize, - ) - return self._stubs['get_data_scan_job'] - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - Awaitable[datascans.ListDataScanJobsResponse]]: - r"""Return a callable for the list data scan jobs method over gRPC. - - Lists DataScanJobs under the given DataScan. - - Returns: - Callable[[~.ListDataScanJobsRequest], - Awaitable[~.ListDataScanJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_data_scan_jobs' not in self._stubs: - self._stubs['list_data_scan_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/ListDataScanJobs', - request_serializer=datascans.ListDataScanJobsRequest.serialize, - response_deserializer=datascans.ListDataScanJobsResponse.deserialize, - ) - return self._stubs['list_data_scan_jobs'] - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - Awaitable[datascans.GenerateDataQualityRulesResponse]]: - r"""Return a callable for the generate data quality rules method over gRPC. - - Generates recommended data quality rules based on the - results of a data profiling scan. - - Use the recommendations to build rules for a data - quality scan. - - Returns: - Callable[[~.GenerateDataQualityRulesRequest], - Awaitable[~.GenerateDataQualityRulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'generate_data_quality_rules' not in self._stubs: - self._stubs['generate_data_quality_rules'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataScanService/GenerateDataQualityRules', - request_serializer=datascans.GenerateDataQualityRulesRequest.serialize, - response_deserializer=datascans.GenerateDataQualityRulesResponse.deserialize, - ) - return self._stubs['generate_data_quality_rules'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_scan: self._wrap_method( - self.create_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.update_data_scan: self._wrap_method( - self.update_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_scan: self._wrap_method( - self.delete_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan: self._wrap_method( - self.get_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scans: self._wrap_method( - self.list_data_scans, - default_timeout=None, - client_info=client_info, - ), - self.run_data_scan: self._wrap_method( - self.run_data_scan, - default_timeout=None, - client_info=client_info, - ), - self.get_data_scan_job: self._wrap_method( - self.get_data_scan_job, - default_timeout=None, - client_info=client_info, - ), - self.list_data_scan_jobs: self._wrap_method( - self.list_data_scan_jobs, - default_timeout=None, - client_info=client_info, - ), - self.generate_data_quality_rules: self._wrap_method( - self.generate_data_quality_rules, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( 
- self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataScanServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py deleted file mode 100644 index 467803d26e62..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py +++ /dev/null @@ -1,2620 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import datascans -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseDataScanServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataScanServiceRestInterceptor: - """Interceptor for DataScanService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DataScanServiceRestTransport. - - .. 
code-block:: python - class MyCustomDataScanServiceInterceptor(DataScanServiceRestInterceptor): - def pre_create_data_scan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_data_scan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_data_scan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_data_scan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_generate_data_quality_rules(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_generate_data_quality_rules(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_data_scan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_data_scan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_data_scan_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_data_scan_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_data_scan_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_data_scan_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_data_scans(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_data_scans(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_data_scan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_data_scan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_data_scan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_data_scan(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DataScanServiceRestTransport(interceptor=MyCustomDataScanServiceInterceptor()) - client = DataScanServiceClient(transport=transport) - - - """ - def pre_create_data_scan(self, request: datascans.CreateDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.CreateDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_data_scan - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_create_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_data_scan - - DEPRECATED. Please use the `post_create_data_scan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_create_data_scan` interceptor runs - before the `post_create_data_scan_with_metadata` interceptor. 
- """ - return response - - def post_create_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_data_scan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_create_data_scan_with_metadata` - interceptor in new development instead of the `post_create_data_scan` interceptor. - When both interceptors are used, this `post_create_data_scan_with_metadata` interceptor runs after the - `post_create_data_scan` interceptor. The (possibly modified) response returned by - `post_create_data_scan` will be passed to - `post_create_data_scan_with_metadata`. - """ - return response, metadata - - def pre_delete_data_scan(self, request: datascans.DeleteDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DeleteDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_data_scan - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_delete_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_data_scan - - DEPRECATED. Please use the `post_delete_data_scan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_delete_data_scan` interceptor runs - before the `post_delete_data_scan_with_metadata` interceptor. - """ - return response - - def post_delete_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_data_scan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_delete_data_scan_with_metadata` - interceptor in new development instead of the `post_delete_data_scan` interceptor. - When both interceptors are used, this `post_delete_data_scan_with_metadata` interceptor runs after the - `post_delete_data_scan` interceptor. The (possibly modified) response returned by - `post_delete_data_scan` will be passed to - `post_delete_data_scan_with_metadata`. - """ - return response, metadata - - def pre_generate_data_quality_rules(self, request: datascans.GenerateDataQualityRulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GenerateDataQualityRulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for generate_data_quality_rules - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_generate_data_quality_rules(self, response: datascans.GenerateDataQualityRulesResponse) -> datascans.GenerateDataQualityRulesResponse: - """Post-rpc interceptor for generate_data_quality_rules - - DEPRECATED. Please use the `post_generate_data_quality_rules_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_generate_data_quality_rules` interceptor runs - before the `post_generate_data_quality_rules_with_metadata` interceptor. - """ - return response - - def post_generate_data_quality_rules_with_metadata(self, response: datascans.GenerateDataQualityRulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GenerateDataQualityRulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for generate_data_quality_rules - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_generate_data_quality_rules_with_metadata` - interceptor in new development instead of the `post_generate_data_quality_rules` interceptor. - When both interceptors are used, this `post_generate_data_quality_rules_with_metadata` interceptor runs after the - `post_generate_data_quality_rules` interceptor. The (possibly modified) response returned by - `post_generate_data_quality_rules` will be passed to - `post_generate_data_quality_rules_with_metadata`. - """ - return response, metadata - - def pre_get_data_scan(self, request: datascans.GetDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GetDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_scan - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_get_data_scan(self, response: datascans.DataScan) -> datascans.DataScan: - """Post-rpc interceptor for get_data_scan - - DEPRECATED. Please use the `post_get_data_scan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_get_data_scan` interceptor runs - before the `post_get_data_scan_with_metadata` interceptor. - """ - return response - - def post_get_data_scan_with_metadata(self, response: datascans.DataScan, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DataScan, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_scan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_get_data_scan_with_metadata` - interceptor in new development instead of the `post_get_data_scan` interceptor. - When both interceptors are used, this `post_get_data_scan_with_metadata` interceptor runs after the - `post_get_data_scan` interceptor. The (possibly modified) response returned by - `post_get_data_scan` will be passed to - `post_get_data_scan_with_metadata`. - """ - return response, metadata - - def pre_get_data_scan_job(self, request: datascans.GetDataScanJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.GetDataScanJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_scan_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. 
- """ - return request, metadata - - def post_get_data_scan_job(self, response: datascans.DataScanJob) -> datascans.DataScanJob: - """Post-rpc interceptor for get_data_scan_job - - DEPRECATED. Please use the `post_get_data_scan_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_get_data_scan_job` interceptor runs - before the `post_get_data_scan_job_with_metadata` interceptor. - """ - return response - - def post_get_data_scan_job_with_metadata(self, response: datascans.DataScanJob, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.DataScanJob, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_scan_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_get_data_scan_job_with_metadata` - interceptor in new development instead of the `post_get_data_scan_job` interceptor. - When both interceptors are used, this `post_get_data_scan_job_with_metadata` interceptor runs after the - `post_get_data_scan_job` interceptor. The (possibly modified) response returned by - `post_get_data_scan_job` will be passed to - `post_get_data_scan_job_with_metadata`. - """ - return response, metadata - - def pre_list_data_scan_jobs(self, request: datascans.ListDataScanJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScanJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_scan_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_list_data_scan_jobs(self, response: datascans.ListDataScanJobsResponse) -> datascans.ListDataScanJobsResponse: - """Post-rpc interceptor for list_data_scan_jobs - - DEPRECATED. Please use the `post_list_data_scan_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_list_data_scan_jobs` interceptor runs - before the `post_list_data_scan_jobs_with_metadata` interceptor. - """ - return response - - def post_list_data_scan_jobs_with_metadata(self, response: datascans.ListDataScanJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScanJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_scan_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_list_data_scan_jobs_with_metadata` - interceptor in new development instead of the `post_list_data_scan_jobs` interceptor. - When both interceptors are used, this `post_list_data_scan_jobs_with_metadata` interceptor runs after the - `post_list_data_scan_jobs` interceptor. The (possibly modified) response returned by - `post_list_data_scan_jobs` will be passed to - `post_list_data_scan_jobs_with_metadata`. 
- """ - return response, metadata - - def pre_list_data_scans(self, request: datascans.ListDataScansRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScansRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_scans - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_list_data_scans(self, response: datascans.ListDataScansResponse) -> datascans.ListDataScansResponse: - """Post-rpc interceptor for list_data_scans - - DEPRECATED. Please use the `post_list_data_scans_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_list_data_scans` interceptor runs - before the `post_list_data_scans_with_metadata` interceptor. - """ - return response - - def post_list_data_scans_with_metadata(self, response: datascans.ListDataScansResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.ListDataScansResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_scans - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_list_data_scans_with_metadata` - interceptor in new development instead of the `post_list_data_scans` interceptor. - When both interceptors are used, this `post_list_data_scans_with_metadata` interceptor runs after the - `post_list_data_scans` interceptor. The (possibly modified) response returned by - `post_list_data_scans` will be passed to - `post_list_data_scans_with_metadata`. - """ - return response, metadata - - def pre_run_data_scan(self, request: datascans.RunDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.RunDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for run_data_scan - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_run_data_scan(self, response: datascans.RunDataScanResponse) -> datascans.RunDataScanResponse: - """Post-rpc interceptor for run_data_scan - - DEPRECATED. Please use the `post_run_data_scan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_run_data_scan` interceptor runs - before the `post_run_data_scan_with_metadata` interceptor. - """ - return response - - def post_run_data_scan_with_metadata(self, response: datascans.RunDataScanResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.RunDataScanResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for run_data_scan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_run_data_scan_with_metadata` - interceptor in new development instead of the `post_run_data_scan` interceptor. - When both interceptors are used, this `post_run_data_scan_with_metadata` interceptor runs after the - `post_run_data_scan` interceptor. 
The (possibly modified) response returned by - `post_run_data_scan` will be passed to - `post_run_data_scan_with_metadata`. - """ - return response, metadata - - def pre_update_data_scan(self, request: datascans.UpdateDataScanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datascans.UpdateDataScanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_scan - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_update_data_scan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_scan - - DEPRECATED. Please use the `post_update_data_scan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. This `post_update_data_scan` interceptor runs - before the `post_update_data_scan_with_metadata` interceptor. - """ - return response - - def post_update_data_scan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_scan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataScanService server but before it is returned to user code. - - We recommend only using this `post_update_data_scan_with_metadata` - interceptor in new development instead of the `post_update_data_scan` interceptor. - When both interceptors are used, this `post_update_data_scan_with_metadata` interceptor runs after the - `post_update_data_scan` interceptor. The (possibly modified) response returned by - `post_update_data_scan` will be passed to - `post_update_data_scan_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. 
- """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataScanService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DataScanService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DataScanServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DataScanServiceRestInterceptor - - -class DataScanServiceRestTransport(_BaseDataScanServiceRestTransport): - """REST backend synchronous transport for DataScanService. - - DataScanService manages DataScan resources which can be - configured to run various types of data scanning workload and - generate enriched metadata (e.g. Data Profile, Data Quality) for - the data source. 
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'dataplex.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[DataScanServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'dataplex.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            url_scheme=url_scheme,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or DataScanServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
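# Note (sketch, not generated code): the http_options dict below exists
# because google.longrunning.Operations has no single canonical REST mapping;
# each API supplies its own URL bindings. The keys are fully qualified RPC
# names and the values are the HTTP rules the shared OperationsRestTransport
# uses to transcode polling calls such as
# operations_client.get_operation(name=...).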
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateDataScan(_BaseDataScanServiceRestTransport._BaseCreateDataScan, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.CreateDataScan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datascans.CreateDataScanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create data scan method over HTTP. - - Args: - request (~.datascans.CreateDataScanRequest): - The request object. Create dataScan request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_http_options() - - request, metadata = self._interceptor.pre_create_data_scan(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_transcoded_request(http_options, request) - - body = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseCreateDataScan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.CreateDataScan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "CreateDataScan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._CreateDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_data_scan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_data_scan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "CreateDataScan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataScan(_BaseDataScanServiceRestTransport._BaseDeleteDataScan, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.DeleteDataScan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datascans.DeleteDataScanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
operations_pb2.Operation: - r"""Call the delete data scan method over HTTP. - - Args: - request (~.datascans.DeleteDataScanRequest): - The request object. Delete dataScan request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_http_options() - - request, metadata = self._interceptor.pre_delete_data_scan(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.DeleteDataScan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "DeleteDataScan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._DeleteDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
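# Note (sketch, not generated code): from_http_response() just below maps the
# HTTP status code to the matching GoogleAPICallError subclass
# (404 -> NotFound, 403 -> Forbidden, 409 -> Conflict, ...), so user code can
# catch typed exceptions. A hedged usage sketch; `scan_name` is a placeholder:
#
#     from google.api_core import exceptions
#     try:
#         client.delete_data_scan(name=scan_name)
#     except exceptions.NotFound:
#         pass  # already gone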
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_data_scan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_data_scan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "DeleteDataScan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GenerateDataQualityRules(_BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.GenerateDataQualityRules") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datascans.GenerateDataQualityRulesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.GenerateDataQualityRulesResponse: - r"""Call the generate data quality - rules method over HTTP. - - Args: - request (~.datascans.GenerateDataQualityRulesRequest): - The request object. Request details for generating data - quality rule recommendations. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.GenerateDataQualityRulesResponse: - Response details for data quality - rule recommendations. 
- - """ - - http_options = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_http_options() - - request, metadata = self._interceptor.pre_generate_data_quality_rules(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_transcoded_request(http_options, request) - - body = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GenerateDataQualityRules", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GenerateDataQualityRules", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._GenerateDataQualityRules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.GenerateDataQualityRulesResponse() - pb_resp = datascans.GenerateDataQualityRulesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_generate_data_quality_rules(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_generate_data_quality_rules_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.GenerateDataQualityRulesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GenerateDataQualityRules", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataScan(_BaseDataScanServiceRestTransport._BaseGetDataScan, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.GetDataScan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return 
response - - def __call__(self, - request: datascans.GetDataScanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.DataScan: - r"""Call the get data scan method over HTTP. - - Args: - request (~.datascans.GetDataScanRequest): - The request object. Get dataScan request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.DataScan: - Represents a user-visible job which provides the - insights for the related data source. - - For example: - - - Data quality: generates queries based on the rules and - runs against the data to get data quality check - results. For more information, see `Auto data quality - overview `__. - - Data profile: analyzes the data in tables and - generates insights about the structure, content and - relationships (such as null percent, cardinality, - min/max/mean, etc). For more information, see `About - data - profiling `__. - - Data discovery: scans data in Cloud Storage buckets to - extract and then catalog metadata. For more - information, see `Discover and catalog Cloud Storage - data `__. - - """ - - http_options = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_http_options() - - request, metadata = self._interceptor.pre_get_data_scan(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseGetDataScan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetDataScan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetDataScan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._GetDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
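# Note (sketch, not generated code): in the parsing step below,
# datascans.DataScan.pb(resp) returns the raw protobuf message wrapped by the
# proto-plus object, so json_format.Parse(...) deserializes the HTTP body
# directly into `resp` in place; the wrapper and the parsed protobuf share
# storage.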
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.DataScan() - pb_resp = datascans.DataScan.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_scan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_scan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.DataScan.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetDataScan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataScanJob(_BaseDataScanServiceRestTransport._BaseGetDataScanJob, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.GetDataScanJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datascans.GetDataScanJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.DataScanJob: - r"""Call the get data scan job method over HTTP. - - Args: - request (~.datascans.GetDataScanJobRequest): - The request object. Get DataScanJob request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.DataScanJob: - A DataScanJob represents an instance - of DataScan execution. 
- - """ - - http_options = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_http_options() - - request, metadata = self._interceptor.pre_get_data_scan_job(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetDataScanJob", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetDataScanJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._GetDataScanJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.DataScanJob() - pb_resp = datascans.DataScanJob.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_scan_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_scan_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.DataScanJob.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetDataScanJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataScanJobs(_BaseDataScanServiceRestTransport._BaseListDataScanJobs, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.ListDataScanJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datascans.ListDataScanJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.ListDataScanJobsResponse: - r"""Call the list 
data scan jobs method over HTTP. - - Args: - request (~.datascans.ListDataScanJobsRequest): - The request object. List DataScanJobs request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.ListDataScanJobsResponse: - List DataScanJobs response. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_data_scan_jobs(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListDataScanJobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListDataScanJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._ListDataScanJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
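# Note (sketch, not generated code): the raw ListDataScanJobsResponse parsed
# below carries a next_page_token; at the client layer it is wrapped in a
# pager that fetches further pages transparently during iteration. A hedged
# sketch; `scan_name` is a placeholder DataScan resource name:
#
#     for job in client.list_data_scan_jobs(parent=scan_name):
#         print(job.name, job.state)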
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.ListDataScanJobsResponse() - pb_resp = datascans.ListDataScanJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_scan_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_scan_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.ListDataScanJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListDataScanJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataScans(_BaseDataScanServiceRestTransport._BaseListDataScans, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.ListDataScans") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datascans.ListDataScansRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.ListDataScansResponse: - r"""Call the list data scans method over HTTP. - - Args: - request (~.datascans.ListDataScansRequest): - The request object. List dataScans request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.ListDataScansResponse: - List dataScans response. 
- """ - - http_options = _BaseDataScanServiceRestTransport._BaseListDataScans._get_http_options() - - request, metadata = self._interceptor.pre_list_data_scans(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseListDataScans._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseListDataScans._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListDataScans", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListDataScans", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._ListDataScans._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.ListDataScansResponse() - pb_resp = datascans.ListDataScansResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_scans(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_scans_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.ListDataScansResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListDataScans", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RunDataScan(_BaseDataScanServiceRestTransport._BaseRunDataScan, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.RunDataScan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datascans.RunDataScanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datascans.RunDataScanResponse: - r"""Call the run 
data scan method over HTTP. - - Args: - request (~.datascans.RunDataScanRequest): - The request object. Run DataScan Request - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datascans.RunDataScanResponse: - Run DataScan Response. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_http_options() - - request, metadata = self._interceptor.pre_run_data_scan(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_transcoded_request(http_options, request) - - body = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseRunDataScan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.RunDataScan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "RunDataScan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._RunDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datascans.RunDataScanResponse() - pb_resp = datascans.RunDataScanResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_run_data_scan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_run_data_scan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datascans.RunDataScanResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "RunDataScan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataScan(_BaseDataScanServiceRestTransport._BaseUpdateDataScan, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.UpdateDataScan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datascans.UpdateDataScanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update data scan method over HTTP. - - Args: - request (~.datascans.UpdateDataScanRequest): - The request object. Update dataScan request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_http_options() - - request, metadata = self._interceptor.pre_update_data_scan(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_transcoded_request(http_options, request) - - body = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.UpdateDataScan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "UpdateDataScan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._UpdateDataScan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_scan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_scan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "UpdateDataScan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_data_scan(self) -> Callable[ - [datascans.CreateDataScanRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataScan(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_data_scan(self) -> Callable[ - [datascans.DeleteDataScanRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDataScan(self._session, self._host, self._interceptor) # type: ignore - - @property - def generate_data_quality_rules(self) -> Callable[ - [datascans.GenerateDataQualityRulesRequest], - datascans.GenerateDataQualityRulesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GenerateDataQualityRules(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_scan(self) -> Callable[ - [datascans.GetDataScanRequest], - datascans.DataScan]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataScan(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_scan_job(self) -> Callable[ - [datascans.GetDataScanJobRequest], - datascans.DataScanJob]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataScanJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_scan_jobs(self) -> Callable[ - [datascans.ListDataScanJobsRequest], - datascans.ListDataScanJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataScanJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_scans(self) -> Callable[ - [datascans.ListDataScansRequest], - datascans.ListDataScansResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataScans(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_data_scan(self) -> Callable[ - [datascans.RunDataScanRequest], - datascans.RunDataScanResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunDataScan(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_scan(self) -> Callable[ - [datascans.UpdateDataScanRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateDataScan(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseDataScanServiceRestTransport._BaseGetLocation, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseDataScanServiceRestTransport._BaseListLocations, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseDataScanServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseDataScanServiceRestTransport._BaseCancelOperation, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseDataScanServiceRestTransport._BaseDeleteOperation, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseDataScanServiceRestTransport._BaseGetOperation, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseDataScanServiceRestTransport._BaseListOperations, DataScanServiceRestStub): - def __hash__(self): - return hash("DataScanServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseDataScanServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseDataScanServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataScanServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataScanServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataScanServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataScanServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataScanService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DataScanServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py deleted file mode 100644 index b099e32c902a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py +++ /dev/null @@ -1,645 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import DataScanServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import datascans -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseDataScanServiceRestTransport(DataScanServiceTransport): - """Base REST backend transport for DataScanService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateDataScan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "dataScanId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/dataScans', - 'body': 'data_scan', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.CreateDataScanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseCreateDataScan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataScan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.DeleteDataScanRequest.pb(request) - transcoded_request = 
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseDeleteDataScan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGenerateDataQualityRules: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}:generateDataQualityRules', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}:generateDataQualityRules', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.GenerateDataQualityRulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseGenerateDataQualityRules._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataScan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.GetDataScanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseGetDataScan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataScanJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.GetDataScanJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseGetDataScanJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataScanJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/dataScans/*}/jobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.ListDataScanJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseListDataScanJobs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataScans: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dataScans', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.ListDataScansRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseListDataScans._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRunDataScan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/dataScans/*}:run', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.RunDataScanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseRunDataScan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataScan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_scan.name=projects/*/locations/*/dataScans/*}', - 'body': 'data_scan', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datascans.UpdateDataScanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataScanServiceRestTransport._BaseUpdateDataScan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return 
http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, 
request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDataScanServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py deleted file mode 100644 index 68e09c57de83..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataTaxonomyServiceClient -from .async_client import DataTaxonomyServiceAsyncClient - -__all__ = ( - 'DataTaxonomyServiceClient', - 'DataTaxonomyServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py deleted file mode 100644 index b30b9bf94e01..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ /dev/null @@ -1,2543 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
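The ``_get_http_options`` / ``_get_transcoded_request`` pairs deleted above all delegate to ``google.api_core.path_template.transcode``, which picks the first HTTP rule whose URI template matches the request and splits the message into path, body, and query components. A minimal sketch of that flow, using rules copied from the options above; a plain dict stands in for the protobuf request:

.. code-block:: python

    from google.api_core import path_template

    # Rules copied from _BaseGetOperation: transcode() tries them in order
    # and uses the first template the request's fields satisfy.
    get_rules = [
        {'method': 'get', 'uri': '/v1/{name=projects/*/locations/*/operations/*}'},
        {'method': 'get', 'uri': '/v1/{name=organizations/*/locations/*/operations/*}'},
    ]
    t = path_template.transcode(
        get_rules, name='organizations/my-org/locations/us-central1/operations/op-1')
    print(t['uri'])  # /v1/organizations/my-org/locations/us-central1/operations/op-1

    # With a 'body' of '*', as in _BaseRunDataScan, every field not bound
    # into the path is sent in the request body rather than the query string.
    run_rules = [{
        'method': 'post',
        'uri': '/v1/{name=projects/*/locations/*/dataScans/*}:run',
        'body': '*',
    }]
    t = path_template.transcode(
        run_rules, name='projects/p/locations/l/dataScans/scan')
    print(t['method'], t['body'], t['query_params'])  # post {} {}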
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.dataplex_v1.types import security -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport -from .client import DataTaxonomyServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataTaxonomyServiceAsyncClient: - """DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - """ - - _client: DataTaxonomyServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - - data_attribute_path = staticmethod(DataTaxonomyServiceClient.data_attribute_path) - parse_data_attribute_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_path) - data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.data_attribute_binding_path) - parse_data_attribute_binding_path = staticmethod(DataTaxonomyServiceClient.parse_data_attribute_binding_path) - data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.data_taxonomy_path) - parse_data_taxonomy_path = staticmethod(DataTaxonomyServiceClient.parse_data_taxonomy_path) - common_billing_account_path = staticmethod(DataTaxonomyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataTaxonomyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataTaxonomyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataTaxonomyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataTaxonomyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataTaxonomyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataTaxonomyServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataTaxonomyServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataTaxonomyServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataTaxonomyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceAsyncClient: The constructed client. - """ - return DataTaxonomyServiceClient.from_service_account_info.__func__(DataTaxonomyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceAsyncClient: The constructed client. - """ - return DataTaxonomyServiceClient.from_service_account_file.__func__(DataTaxonomyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
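The ``from_service_account_*`` factories above forward any extra keyword arguments to the constructor, so they compose with the endpoint handling described here. A short sketch of constructing the async client; the key-file path is a placeholder, and the explicit ``api_endpoint`` is shown only to illustrate the override:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import dataplex_v1

    # 'service-account.json' is a hypothetical path; from_service_account_file
    # loads the key and passes remaining kwargs through to __init__.
    client = dataplex_v1.DataTaxonomyServiceAsyncClient.from_service_account_file(
        'service-account.json',
        client_options=ClientOptions(api_endpoint='dataplex.googleapis.com'),
    )
    print(client.api_endpoint, client.universe_domain)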
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataTaxonomyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataTaxonomyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataTaxonomyServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DataTaxonomyServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data taxonomy service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataTaxonomyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DataTaxonomyServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "credentialsType": None, - } - ) - - async def create_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - data_taxonomy_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
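The structured DEBUG record emitted at construction time (the ``_LOGGER.debug`` block above) is gated only on stdlib logging levels, so it can be surfaced without touching client code. A minimal sketch, assuming ``CLIENT_LOGGING_SUPPORTED`` resolved to ``True`` at import time:

.. code-block:: python

    import logging

    # Root handler plus DEBUG level makes the "Created client ..." record visible.
    logging.basicConfig(level=logging.DEBUG)
    # Or scope it to the dataplex client loggers only:
    logging.getLogger('google.cloud.dataplex_v1').setLevel(logging.DEBUG)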
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]]): - The request object. Create DataTaxonomy request. - parent (:class:`str`): - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): - Required. DataTaxonomy resource. - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_taxonomy_id (:class:`str`): - Required. DataTaxonomy identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Project. - - This corresponds to the ``data_taxonomy_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.create_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_taxonomy, data_taxonomy_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest): - request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if data_taxonomy_id is not None: - request.data_taxonomy_id = data_taxonomy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, - *, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataTaxonomyRequest( - ) - - # Make the request - operation = client.update_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]]): - The request object. Update DataTaxonomy request. - data_taxonomy (:class:`google.cloud.dataplex_v1.types.DataTaxonomy`): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.update_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_taxonomy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_taxonomy.name", request.data_taxonomy.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_taxonomy(self, - request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]]): - The request object. Delete DataTaxonomy request. - name (:class:`str`): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): - request = data_taxonomy.DeleteDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
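The call below returns a raw ``operations_pb2.Operation`` that ``operation_async.from_gapic`` then wraps into an awaitable future. From the caller's side the whole flow reduces to the following sketch (resource name assumed, credentials resolved from the environment):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def delete_taxonomy(name: str) -> None:
        client = dataplex_v1.DataTaxonomyServiceAsyncClient()
        # The RPC returns a long-running operation; awaiting .result()
        # blocks until the server-side deletion finishes (Empty on success).
        operation = await client.delete_data_taxonomy(
            request=dataplex_v1.DeleteDataTaxonomyRequest(name=name)
        )
        await operation.result()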
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_taxonomies(self, - request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataTaxonomiesAsyncPager: - r"""Lists DataTaxonomy resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_taxonomies(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]]): - The request object. List DataTaxonomies request. - parent (:class:`str`): - Required. The resource name of the DataTaxonomy - location, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager: - List DataTaxonomies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.list_data_taxonomies is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
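``list_data_taxonomies`` does not hand back the raw response; the pager built below refetches pages lazily. A caller-side sketch, with a hypothetical parent resource:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def print_taxonomies(parent: str) -> None:
        client = dataplex_v1.DataTaxonomyServiceAsyncClient()
        pager = await client.list_data_taxonomies(
            request=dataplex_v1.ListDataTaxonomiesRequest(parent=parent)
        )
        # Crossing a page boundary triggers another ListDataTaxonomies RPC
        # transparently; iteration yields DataTaxonomy messages.
        async for taxonomy in pager:
            print(taxonomy.name)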
- if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest): - request = data_taxonomy.ListDataTaxonomiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataTaxonomiesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_taxonomy(self, - request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> data_taxonomy.DataTaxonomy: - r"""Retrieves a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]]): - The request object. Get DataTaxonomy request. - name (:class:`str`): - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataTaxonomy: - DataTaxonomy represents a set of - hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have - attributes to manage PII data. It is - defined at project level. 
- - """ - warnings.warn("DataTaxonomyServiceAsyncClient.get_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): - request = data_taxonomy.GetDataTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - data_attribute_binding_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]]): - The request object. Create DataAttributeBinding request. 
- parent (:class:`str`): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): - Required. DataAttributeBinding - resource. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_binding_id (:class:`str`): - Required. DataAttributeBinding identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Location. - - This corresponds to the ``data_attribute_binding_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.create_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_attribute_binding, data_attribute_binding_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): - request = data_taxonomy.CreateDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if data_attribute_binding_id is not None: - request.data_attribute_binding_id = data_attribute_binding_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
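The ``x-goog-request-params`` routing header assembled just above comes from ``gapic_v1.routing_header``; its behavior in isolation:

.. code-block:: python

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (('parent', 'projects/p/locations/l'),)
    )
    # A single ('x-goog-request-params', 'parent=projects%2Fp%2Flocations%2Fl')
    # pair; the value is form-encoded so the backend can route on it.
    print(header)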
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, - *, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.UpdateDataAttributeBindingRequest( - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.update_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]]): - The request object. Update DataAttributeBinding request. - data_attribute_binding (:class:`google.cloud.dataplex_v1.types.DataAttributeBinding`): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. 
Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.update_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_attribute_binding, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): - request = data_taxonomy.UpdateDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute_binding.name", request.data_attribute_binding.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]]): - The request object. Delete DataAttributeBinding request. - name (:class:`str`): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): - request = data_taxonomy.DeleteDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. 
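The sample above passes a literal ``etag``; in practice the value comes from a prior read, which makes the delete conditional on the binding not having changed in between. A sketch under that assumption (resource name hypothetical):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def delete_binding(name: str) -> None:
        client = dataplex_v1.DataTaxonomyServiceAsyncClient()
        binding = await client.get_data_attribute_binding(
            request=dataplex_v1.GetDataAttributeBindingRequest(name=name)
        )
        # Carrying the freshly read etag lets the server reject the delete
        # if the binding was modified after the read.
        operation = await client.delete_data_attribute_binding(
            request=dataplex_v1.DeleteDataAttributeBindingRequest(
                name=name, etag=binding.etag
            )
        )
        await operation.result()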
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_attribute_bindings(self, - request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataAttributeBindingsAsyncPager: - r"""Lists DataAttributeBinding resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_attribute_bindings(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributeBindingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attribute_bindings(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]]): - The request object. List DataAttributeBindings request. - parent (:class:`str`): - Required. The resource name of the Location: - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager: - List DataAttributeBindings response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.list_data_attribute_bindings is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): - request = data_taxonomy.ListDataAttributeBindingsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attribute_bindings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataAttributeBindingsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> data_taxonomy.DataAttributeBinding: - r"""Retrieves a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeBindingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute_binding(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]]): - The request object. Get DataAttributeBinding request. - name (:class:`str`): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataAttributeBinding: - DataAttributeBinding represents - binding of attributes to resources. Eg: - Bind 'CustomerInfo' entity with 'PII' - attribute. - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.get_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): - request = data_taxonomy.GetDataAttributeBindingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_attribute(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - data_attribute_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]]): - The request object. Create DataAttribute request. - parent (:class:`str`): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`): - Required. DataAttribute resource. - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_id (:class:`str`): - Required. DataAttribute identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the DataTaxonomy. - - This corresponds to the ``data_attribute_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.create_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
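# (Note, not part of the generated file; identifiers are illustrative.)
# Per the data_attribute_id rules in the docstring above, "pii" and
# "pii-level-2" are valid identifiers, while "2pii" (starts with a digit),
# "PII" (uppercase), and "pii-" (ends with a hyphen) would be rejected.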
- flattened_params = [parent, data_attribute, data_attribute_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): - request = data_taxonomy.CreateDataAttributeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_attribute is not None: - request.data_attribute = data_attribute - if data_attribute_id is not None: - request.data_attribute_id = data_attribute_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_data_attribute(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None, - *, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataAttributeRequest( - ) - - # Make the request - operation = client.update_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]]): - The request object. Update DataAttribute request. - data_attribute (:class:`google.cloud.dataplex_v1.types.DataAttribute`): - Required. Only fields specified in ``update_mask`` are - updated. 
- - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.update_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_attribute, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest): - request = data_taxonomy.UpdateDataAttributeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute is not None: - request.data_attribute = data_attribute - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute.name", request.data_attribute.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
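# (Sketch, not part of the generated file; ``attr`` and ``mask`` are
# placeholder caller values.) The AsyncOperation returned below is a future;
# a typical caller resolves it with two awaits:
#
#   operation = await client.update_data_attribute(
#       data_attribute=attr, update_mask=mask)
#   updated = await operation.result()  # waits until the LRO completes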
- return response - - async def delete_data_attribute(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]]): - The request object. Delete DataAttribute request. - name (:class:`str`): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.delete_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
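# (Sketch, not part of the generated file; the resource name is a
# placeholder.) Because of the coercion below, a plain dict is accepted
# anywhere the request proto is:
#
#   await client.delete_data_attribute(request={
#       "name": "projects/my-project/locations/us-central1"
#               "/dataTaxonomies/my-taxonomy/attributes/my-attribute",
#   })
#
# which is equivalent to passing a DeleteDataAttributeRequest built from the
# same field.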
- if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): - request = data_taxonomy.DeleteDataAttributeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_data_attributes(self, - request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataAttributesAsyncPager: - r"""Lists Data Attribute resources in a DataTaxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_data_attributes(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attributes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]]): - The request object. List DataAttributes request. - parent (:class:`str`): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager: - List DataAttributes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. 
- - """ - warnings.warn("DataTaxonomyServiceAsyncClient.list_data_attributes is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributesRequest): - request = data_taxonomy.ListDataAttributesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_attributes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataAttributesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_attribute(self, - request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> data_taxonomy.DataAttribute: - r"""Retrieves a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_attribute(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]]): - The request object. Get DataAttribute request. - name (:class:`str`): - Required. 
The resource name of the dataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataAttribute: - Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceAsyncClient.get_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataAttributeRequest): - request = data_taxonomy.GetDataAttributeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
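# (Note, not part of the generated file; ``op_name`` is a placeholder.)
# ``retry`` and ``timeout`` arrive here as the gapic_v1.method.DEFAULT
# sentinel, which means "use the values configured when the method was
# wrapped", not "no retry / no timeout". Both can be overridden per call:
#
#   await client.cancel_operation(
#       request={"name": op_name}, retry=None, timeout=30.0)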
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataTaxonomyServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DataTaxonomyServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py deleted file mode 100644 index 06bf7e455890..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ /dev/null @@ -1,2917 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
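# (Sketch, not part of the deleted module below; the resource name is a
# placeholder.) The synchronous client removed here mirrors the async client
# above, minus the awaits:
#
#   from google.cloud import dataplex_v1
#
#   client = dataplex_v1.DataTaxonomyServiceClient()
#   attribute = client.get_data_attribute(
#       name="projects/my-project/locations/us-central1"
#            "/dataTaxonomies/my-taxonomy/attributes/my-attribute")
#   print(attribute.name)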
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.dataplex_v1.types import security -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataTaxonomyServiceGrpcTransport -from .transports.grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport -from .transports.rest import DataTaxonomyServiceRestTransport - - -class DataTaxonomyServiceClientMeta(type): - """Metaclass for the DataTaxonomyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] - _transport_registry["grpc"] = DataTaxonomyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataTaxonomyServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DataTaxonomyServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataTaxonomyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
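# (Sketch, not part of the generated file.) Given the registry above, the
# metaclass resolves transports like so:
#
#   DataTaxonomyServiceClient.get_transport_class("rest")
#   # -> DataTaxonomyServiceRestTransport
#   DataTaxonomyServiceClient.get_transport_class()
#   # -> DataTaxonomyServiceGrpcTransport (the first registered entry)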
- if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DataTaxonomyServiceClient(metaclass=DataTaxonomyServiceClientMeta): - """DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTaxonomyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataTaxonomyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataTaxonomyServiceTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def data_attribute_path(project: str,location: str,dataTaxonomy: str,data_attribute_id: str,) -> str: - """Returns a fully-qualified data_attribute string.""" - return "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, ) - - @staticmethod - def parse_data_attribute_path(path: str) -> Dict[str,str]: - """Parses a data_attribute path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)/attributes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_attribute_binding_path(project: str,location: str,data_attribute_binding_id: str,) -> str: - """Returns a fully-qualified data_attribute_binding string.""" - return "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, ) - - @staticmethod - def parse_data_attribute_binding_path(path: str) -> Dict[str,str]: - """Parses a data_attribute_binding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataAttributeBindings/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def data_taxonomy_path(project: str,location: str,data_taxonomy_id: str,) -> str: - """Returns a fully-qualified data_taxonomy string.""" - return "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, ) - - @staticmethod - def parse_data_taxonomy_path(path: str) -> Dict[str,str]: - """Parses a data_taxonomy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataTaxonomies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its 
component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client.
- - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataTaxonomyServiceTransport, Callable[..., DataTaxonomyServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data taxonomy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataTaxonomyServiceTransport,Callable[..., DataTaxonomyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataTaxonomyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
- ``transport`` was not explicitly provided, the endpoint is
- determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
- variable, which can have one of the following values:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto-switch to the
- default mTLS endpoint if client certificate is present; this is
- the default value).
-
- 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide a client certificate for mTLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
-
- 3. The ``universe_domain`` property can be used to override the
- default "googleapis.com" universe. Note that the ``api_endpoint``
- property still takes precedence, and ``universe_domain`` is
- currently not supported for mTLS.
-
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- """
- self._client_options = client_options
- if isinstance(self._client_options, dict):
- self._client_options = client_options_lib.from_dict(self._client_options)
- if self._client_options is None:
- self._client_options = client_options_lib.ClientOptions()
- self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
- universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
- self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataTaxonomyServiceClient._read_environment_variables()
- self._client_cert_source = DataTaxonomyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
- self._universe_domain = DataTaxonomyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
- self._api_endpoint = None # updated below, depending on `transport`
-
- # Initialize the universe domain validation.
- self._is_universe_domain_valid = False
-
- if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
- # Setup logging.
- client_logging.initialize_logging()
-
- api_key_value = getattr(self._client_options, "api_key", None)
- if api_key_value and credentials:
- raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- transport_provided = isinstance(transport, DataTaxonomyServiceTransport)
- if transport_provided:
- # transport is a DataTaxonomyServiceTransport instance.
- if credentials or self._client_options.credentials_file or api_key_value:
- raise ValueError("When providing a transport instance, "
- "provide its credentials directly.")
- if self._client_options.scopes:
- raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
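# Editorial sketch (code template only): client_options may be a plain dict,
# which __init__ above converts via client_options_lib.from_dict. The endpoint
# shown is hypothetical.
from google.auth.credentials import AnonymousCredentials
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient(
    credentials=AnonymousCredentials(),
    client_options={"api_endpoint": "dataplex.example.test"},  # dict form
)
# Note: setting client_options.api_key together with explicit credentials
# raises ValueError, per the mutual-exclusion check above.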
- ) - self._transport = cast(DataTaxonomyServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataTaxonomyServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataTaxonomyServiceTransport], Callable[..., DataTaxonomyServiceTransport]] = ( - DataTaxonomyServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataTaxonomyServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataTaxonomyServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "credentialsType": None, - } - ) - - def create_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.CreateDataTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - data_taxonomy_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
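# Editorial sketch (code template only): the three accepted forms of the
# `transport` argument handled above -- a string, a ready-made transport
# instance, or a callable returning one. When an instance is passed, its
# credentials must already be attached, as the checks above enforce.
from google.auth.credentials import AnonymousCredentials
from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.data_taxonomy_service import transports

creds = AnonymousCredentials()
by_name = dataplex_v1.DataTaxonomyServiceClient(transport="grpc", credentials=creds)
by_instance = dataplex_v1.DataTaxonomyServiceClient(
    transport=transports.DataTaxonomyServiceGrpcTransport(credentials=creds)
)
by_callable = dataplex_v1.DataTaxonomyServiceClient(
    transport=lambda **kwargs: transports.DataTaxonomyServiceGrpcTransport(**kwargs),
    credentials=creds,
)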
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_create_data_taxonomy():
- # Create a client
- client = dataplex_v1.DataTaxonomyServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.CreateDataTaxonomyRequest(
- parent="parent_value",
- data_taxonomy_id="data_taxonomy_id_value",
- )
-
- # Make the request
- operation = client.create_data_taxonomy(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest, dict]):
- The request object. Create DataTaxonomy request.
- parent (str):
- Required. The resource name of the data taxonomy location, of the form:
- projects/{project_number}/locations/{location_id} where
- ``location_id`` refers to a Google Cloud region.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy):
- Required. DataTaxonomy resource.
- This corresponds to the ``data_taxonomy`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- data_taxonomy_id (str):
- Required. DataTaxonomy identifier.
-
- - Must contain only lowercase letters, numbers and
- hyphens.
- - Must start with a letter.
- - Must be between 1-63 characters.
- - Must end with a number or a letter.
- - Must be unique within the Project.
-
- This corresponds to the ``data_taxonomy_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources,
- grouped with a common theme Eg:
- 'SensitiveDataTaxonomy' can have attributes to manage
- PII data. It is defined at project level.
-
- """
- warnings.warn("DataTaxonomyServiceClient.create_data_taxonomy is deprecated",
- DeprecationWarning)
-
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, data_taxonomy, data_taxonomy_id]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gcd_data_taxonomy.CreateDataTaxonomyRequest):
- request = gcd_data_taxonomy.CreateDataTaxonomyRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
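# Editorial sketch (code template only) of the convention enforced above: pass
# EITHER a full request object OR the flattened fields (parent, data_taxonomy,
# data_taxonomy_id), never both. Resource names are hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
parent = "projects/my-project/locations/us-central1"

# Flattened form:
op = client.create_data_taxonomy(
    parent=parent,
    data_taxonomy=dataplex_v1.DataTaxonomy(description="PII taxonomy"),
    data_taxonomy_id="pii",
)

# Equivalent request-object form:
op = client.create_data_taxonomy(
    request=dataplex_v1.CreateDataTaxonomyRequest(parent=parent, data_taxonomy_id="pii")
)
# Mixing the two forms in one call raises ValueError.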
- if parent is not None: - request.parent = parent - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if data_taxonomy_id is not None: - request.data_taxonomy_id = data_taxonomy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_taxonomy(self, - request: Optional[Union[gcd_data_taxonomy.UpdateDataTaxonomyRequest, dict]] = None, - *, - data_taxonomy: Optional[gcd_data_taxonomy.DataTaxonomy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a DataTaxonomy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataTaxonomyRequest( - ) - - # Make the request - operation = client.update_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest, dict]): - The request object. Update DataTaxonomy request. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
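# Editorial sketch (code template only): working with the operation future
# that create/update methods return after the operation.from_gapic wrap above.
# Names are hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
op = client.create_data_taxonomy(
    parent="projects/my-project/locations/us-central1",
    data_taxonomy=dataplex_v1.DataTaxonomy(),
    data_taxonomy_id="pii",
)
print(op.metadata)                 # OperationMetadata while the LRO runs
taxonomy = op.result(timeout=300)  # blocks; resolves to a DataTaxonomy
print(taxonomy.name)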
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataTaxonomy` DataTaxonomy represents a set of hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage - PII data. It is defined at project level. - - """ - warnings.warn("DataTaxonomyServiceClient.update_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_taxonomy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcd_data_taxonomy.UpdateDataTaxonomyRequest): - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_taxonomy is not None: - request.data_taxonomy = data_taxonomy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_taxonomy.name", request.data_taxonomy.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcd_data_taxonomy.DataTaxonomy, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_taxonomy(self, - request: Optional[Union[data_taxonomy.DeleteDataTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest, dict]): - The request object. Delete DataTaxonomy request. - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceClient.delete_data_taxonomy is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataTaxonomyRequest): - request = data_taxonomy.DeleteDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
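# Editorial sketch (code template only): delete methods also return an
# operation future, but since the result type is empty_pb2.Empty (see the wrap
# just below), result() yields an empty message and is useful purely for
# blocking until the deletion finishes. The resource name is hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
op = client.delete_data_taxonomy(
    name="projects/my-project/locations/us-central1/dataTaxonomies/pii"
)
op.result()  # google.protobuf.empty_pb2.Empty once the server finishes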
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_taxonomies(self, - request: Optional[Union[data_taxonomy.ListDataTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataTaxonomiesPager: - r"""Lists DataTaxonomy resources in a project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_taxonomies(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest, dict]): - The request object. List DataTaxonomies request. - parent (str): - Required. The resource name of the DataTaxonomy - location, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager: - List DataTaxonomies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - warnings.warn("DataTaxonomyServiceClient.list_data_taxonomies is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
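# Editorial sketch (code template only): the ListDataTaxonomiesPager built
# below fetches additional pages transparently during iteration; the `pages`
# attribute exposes whole responses instead. The parent is hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
parent = "projects/my-project/locations/us-central1"

for taxonomy in client.list_data_taxonomies(parent=parent):
    print(taxonomy.name)  # items yielded across all pages

for page in client.list_data_taxonomies(parent=parent).pages:
    print(len(page.data_taxonomies))  # one ListDataTaxonomiesResponse per page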
- if not isinstance(request, data_taxonomy.ListDataTaxonomiesRequest):
- request = data_taxonomy.ListDataTaxonomiesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_data_taxonomies]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListDataTaxonomiesPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_data_taxonomy(self,
- request: Optional[Union[data_taxonomy.GetDataTaxonomyRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> data_taxonomy.DataTaxonomy:
- r"""Retrieves a DataTaxonomy resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import dataplex_v1
-
- def sample_get_data_taxonomy():
- # Create a client
- client = dataplex_v1.DataTaxonomyServiceClient()
-
- # Initialize request argument(s)
- request = dataplex_v1.GetDataTaxonomyRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_data_taxonomy(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.dataplex_v1.types.GetDataTaxonomyRequest, dict]):
- The request object. Get DataTaxonomy request.
- name (str):
- Required. The resource name of the DataTaxonomy:
- projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.dataplex_v1.types.DataTaxonomy:
- DataTaxonomy represents a set of
- hierarchical DataAttributes resources,
- grouped with a common theme Eg:
- 'SensitiveDataTaxonomy' can have
- attributes to manage PII data. It is
- defined at project level.
-
- """
- warnings.warn("DataTaxonomyServiceClient.get_data_taxonomy is deprecated",
- DeprecationWarning)
-
- # Create or coerce a protobuf request object.
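# Editorial sketch (code template only): per-call retry and timeout overrides,
# as the retry/timeout parameters documented above permit on every method. The
# resource name and policy values are hypothetical.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
taxonomy = client.get_data_taxonomy(
    name="projects/my-project/locations/us-central1/dataTaxonomies/pii",
    retry=retries.Retry(
        initial=0.1,
        maximum=10.0,
        multiplier=1.3,
        timeout=60.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    ),
    timeout=30.0,
)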
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataTaxonomyRequest): - request = data_taxonomy.GetDataTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeBindingRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - data_attribute_binding_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest, dict]): - The request object. Create DataAttributeBinding request. - parent (str): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
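# Editorial sketch (code template only): what the routing-header metadata
# assembled by these methods looks like. to_grpc_metadata encodes the routing
# fields into the "x-goog-request-params" entry that the backend uses for
# request routing; the exact percent-encoding may vary by api_core version.
from google.api_core.gapic_v1 import routing_header

entry = routing_header.to_grpc_metadata(
    (("parent", "projects/my-project/locations/us-central1"),)  # hypothetical
)
print(entry)  # ('x-goog-request-params', 'parent=projects%2Fmy-project%2F...')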
- data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. DataAttributeBinding - resource. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_binding_id (str): - Required. DataAttributeBinding identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Location. - - This corresponds to the ``data_attribute_binding_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - warnings.warn("DataTaxonomyServiceClient.create_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_attribute_binding, data_attribute_binding_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeBindingRequest): - request = data_taxonomy.CreateDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if data_attribute_binding_id is not None: - request.data_attribute_binding_id = data_attribute_binding_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeBindingRequest, dict]] = None, - *, - data_attribute_binding: Optional[data_taxonomy.DataAttributeBinding] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.UpdateDataAttributeBindingRequest( - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.update_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest, dict]): - The request object. Update DataAttributeBinding request. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute_binding`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttributeBinding` DataAttributeBinding represents binding of attributes to resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. - - """ - warnings.warn("DataTaxonomyServiceClient.update_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
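# Editorial sketch (code template only) of the update_mask contract stated
# above: only fields named in the mask are written; everything else on the
# resource is left untouched. Names are hypothetical.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.DataTaxonomyServiceClient()
binding = dataplex_v1.DataAttributeBinding(
    name="projects/my-project/locations/us-central1/dataAttributeBindings/b",
)
op = client.update_data_attribute_binding(
    data_attribute_binding=binding,
    update_mask=field_mask_pb2.FieldMask(paths=["attributes"]),  # only this field changes
)
op.result()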
- flattened_params = [data_attribute_binding, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.UpdateDataAttributeBindingRequest): - request = data_taxonomy.UpdateDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute_binding is not None: - request.data_attribute_binding = data_attribute_binding - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute_binding.name", request.data_attribute_binding.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttributeBinding, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest, dict]): - The request object. Delete DataAttributeBinding request. - name (str): - Required. 
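# Editorial sketch (code template only): DeleteDataAttributeBindingRequest
# carries an etag (see the generated sample above), so a typical flow reads
# the binding first and echoes its current etag back to guard against
# concurrent modification. Names are hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
binding = client.get_data_attribute_binding(
    name="projects/my-project/locations/us-central1/dataAttributeBindings/b"
)
op = client.delete_data_attribute_binding(
    request=dataplex_v1.DeleteDataAttributeBindingRequest(
        name=binding.name,
        etag=binding.etag,
    )
)
op.result()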
The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceClient.delete_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeBindingRequest): - request = data_taxonomy.DeleteDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_attribute_bindings(self, - request: Optional[Union[data_taxonomy.ListDataAttributeBindingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataAttributeBindingsPager: - r"""Lists DataAttributeBinding resources in a project and - location. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_attribute_bindings(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributeBindingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attribute_bindings(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest, dict]): - The request object. List DataAttributeBindings request. - parent (str): - Required. The resource name of the Location: - projects/{project_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager: - List DataAttributeBindings response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - warnings.warn("DataTaxonomyServiceClient.list_data_attribute_bindings is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributeBindingsRequest): - request = data_taxonomy.ListDataAttributeBindingsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_attribute_bindings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataAttributeBindingsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_attribute_binding(self, - request: Optional[Union[data_taxonomy.GetDataAttributeBindingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> data_taxonomy.DataAttributeBinding: - r"""Retrieves a DataAttributeBinding resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeBindingRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_attribute_binding(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest, dict]): - The request object. Get DataAttributeBinding request. - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataAttributeBinding: - DataAttributeBinding represents - binding of attributes to resources. Eg: - Bind 'CustomerInfo' entity with 'PII' - attribute. - - """ - warnings.warn("DataTaxonomyServiceClient.get_data_attribute_binding is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
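# Editorial sketch (code template only) of the metadata rule restated above:
# plain keys take str values, while keys ending in "-bin" must carry bytes.
# The header names and resource name are hypothetical.
from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()
binding = client.get_data_attribute_binding(
    name="projects/my-project/locations/us-central1/dataAttributeBindings/b",
    metadata=(
        ("x-custom-trace", "trace-123"),        # str value for a normal key
        ("x-custom-payload-bin", b"\x00\x01"),  # bytes value for a "-bin" key
    ),
)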
- if not isinstance(request, data_taxonomy.GetDataAttributeBindingRequest): - request = data_taxonomy.GetDataAttributeBindingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_attribute_binding] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_data_attribute(self, - request: Optional[Union[data_taxonomy.CreateDataAttributeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - data_attribute_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateDataAttributeRequest, dict]): - The request object. Create DataAttribute request. - parent (str): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. DataAttribute resource. - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_attribute_id (str): - Required. DataAttribute identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the DataTaxonomy. - - This corresponds to the ``data_attribute_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceClient.create_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_attribute, data_attribute_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.CreateDataAttributeRequest): - request = data_taxonomy.CreateDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_attribute is not None: - request.data_attribute = data_attribute - if data_attribute_id is not None: - request.data_attribute_id = data_attribute_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_data_attribute(self, - request: Optional[Union[data_taxonomy.UpdateDataAttributeRequest, dict]] = None, - *, - data_attribute: Optional[data_taxonomy.DataAttribute] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a DataAttribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateDataAttributeRequest( - ) - - # Make the request - operation = client.update_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateDataAttributeRequest, dict]): - The request object. Update DataAttribute request. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. Only fields specified in ``update_mask`` are - updated. - - This corresponds to the ``data_attribute`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.DataAttribute` Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceClient.update_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_attribute, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.UpdateDataAttributeRequest): - request = data_taxonomy.UpdateDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_attribute is not None: - request.data_attribute = data_attribute - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
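# A short sketch of the field-mask update flow implemented here, assuming a
# hypothetical attribute resource name and that ``description`` is the field
# being changed; only paths listed in ``update_mask`` are applied by the service.
#
#     from google.cloud import dataplex_v1
#     from google.protobuf import field_mask_pb2
#
#     client = dataplex_v1.DataTaxonomyServiceClient()
#     attribute = dataplex_v1.DataAttribute(
#         name=(
#             "projects/my-project/locations/us-central1"
#             "/dataTaxonomies/my-taxonomy/attributes/pii"
#         ),
#         description="Tightened PII definition",
#     )
#     operation = client.update_data_attribute(
#         data_attribute=attribute,
#         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
#     )
#     updated = operation.result()  # blocks until the long-running operation completes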
- rpc = self._transport._wrapped_methods[self._transport.update_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_attribute.name", request.data_attribute.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - data_taxonomy.DataAttribute, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_data_attribute(self, - request: Optional[Union[data_taxonomy.DeleteDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteDataAttributeRequest, dict]): - The request object. Delete DataAttribute request. - name (str): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - warnings.warn("DataTaxonomyServiceClient.delete_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.DeleteDataAttributeRequest): - request = data_taxonomy.DeleteDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_attributes(self, - request: Optional[Union[data_taxonomy.ListDataAttributesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataAttributesPager: - r"""Lists Data Attribute resources in a DataTaxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_data_attributes(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataAttributesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_attributes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListDataAttributesRequest, dict]): - The request object. List DataAttributes request. - parent (str): - Required. 
The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager: - List DataAttributes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - warnings.warn("DataTaxonomyServiceClient.list_data_attributes is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.ListDataAttributesRequest): - request = data_taxonomy.ListDataAttributesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_attributes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataAttributesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_attribute(self, - request: Optional[Union[data_taxonomy.GetDataAttributeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> data_taxonomy.DataAttribute: - r"""Retrieves a Data Attribute resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataAttributeRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_attribute(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetDataAttributeRequest, dict]): - The request object. Get DataAttribute request. - name (str): - Required. The resource name of the dataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.DataAttribute: - Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a - hierarchy. A single dataAttribute resource can - contain specs of multiple types - - :literal:`` PII - ResourceAccessSpec : - readers :foo@bar.com - DataAccessSpec : - readers :bar@foo.com`\ \` - - """ - warnings.warn("DataTaxonomyServiceClient.get_data_attribute is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, data_taxonomy.GetDataAttributeRequest): - request = data_taxonomy.GetDataAttributeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_attribute] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataTaxonomyServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. 
- - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
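# A brief sketch of these LRO mixin methods, using a hypothetical operation
# name; plain dicts are expanded into ``operations_pb2`` requests, as the
# isinstance check below shows.
#
#     from google.cloud import dataplex_v1
#
#     client = dataplex_v1.DataTaxonomyServiceClient()
#     op = client.get_operation(
#         {"name": "projects/my-project/locations/us-central1/operations/operation-123"}
#     )
#     print(op.name, op.done)
#
#     resp = client.list_operations(
#         {"name": "projects/my-project/locations/us-central1"}
#     )
#     for op in resp.operations:
#         print(op.name)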
- if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DataTaxonomyServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py deleted file mode 100644 index 1b187755306b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/pagers.py +++ /dev/null @@ -1,444 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
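# The pagers defined in this module share one pattern: ``pages`` re-issues the
# stored request with each ``next_page_token`` until the token is empty, and
# ``__iter__`` flattens the per-page repeated field. A minimal consumption
# sketch, assuming a hypothetical parent resource:
#
#     from google.cloud import dataplex_v1
#
#     client = dataplex_v1.DataTaxonomyServiceClient()
#     pager = client.list_data_taxonomies(
#         parent="projects/my-project/locations/us-central1"
#     )
#     for taxonomy in pager:  # fetches further pages transparently
#         print(taxonomy.name)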
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy - - -class ListDataTaxonomiesPager: - """A pager for iterating through ``list_data_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_taxonomies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataTaxonomies`` requests and continue to iterate - through the ``data_taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataTaxonomiesResponse], - request: data_taxonomy.ListDataTaxonomiesRequest, - response: data_taxonomy.ListDataTaxonomiesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = data_taxonomy.ListDataTaxonomiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataTaxonomy]: - for page in self.pages: - yield from page.data_taxonomies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataTaxonomiesAsyncPager: - """A pager for iterating through ``list_data_taxonomies`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_taxonomies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataTaxonomies`` requests and continue to iterate - through the ``data_taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataTaxonomiesResponse]], - request: data_taxonomy.ListDataTaxonomiesRequest, - response: data_taxonomy.ListDataTaxonomiesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataTaxonomiesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = data_taxonomy.ListDataTaxonomiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataTaxonomy]: - async def async_generator(): - async for page in self.pages: - for response in page.data_taxonomies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributeBindingsPager: - """A pager for iterating through ``list_data_attribute_bindings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_attribute_bindings`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataAttributeBindings`` requests and continue to iterate - through the ``data_attribute_bindings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataAttributeBindingsResponse], - request: data_taxonomy.ListDataAttributeBindingsRequest, - response: data_taxonomy.ListDataAttributeBindingsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataAttributeBindingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataAttributeBinding]: - for page in self.pages: - yield from page.data_attribute_bindings - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributeBindingsAsyncPager: - """A pager for iterating through ``list_data_attribute_bindings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_attribute_bindings`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataAttributeBindings`` requests and continue to iterate - through the ``data_attribute_bindings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]], - request: data_taxonomy.ListDataAttributeBindingsRequest, - response: data_taxonomy.ListDataAttributeBindingsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributeBindingsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = data_taxonomy.ListDataAttributeBindingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributeBindingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttributeBinding]: - async def async_generator(): - async for page in self.pages: - for response in page.data_attribute_bindings: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributesPager: - """A pager for iterating through ``list_data_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_attributes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataAttributes`` requests and continue to iterate - through the ``data_attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., data_taxonomy.ListDataAttributesResponse], - request: data_taxonomy.ListDataAttributesRequest, - response: data_taxonomy.ListDataAttributesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[data_taxonomy.ListDataAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[data_taxonomy.DataAttribute]: - for page in self.pages: - yield from page.data_attributes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataAttributesAsyncPager: - """A pager for iterating through ``list_data_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_attributes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataAttributes`` requests and continue to iterate - through the ``data_attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListDataAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[data_taxonomy.ListDataAttributesResponse]], - request: data_taxonomy.ListDataAttributesRequest, - response: data_taxonomy.ListDataAttributesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListDataAttributesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListDataAttributesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = data_taxonomy.ListDataAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[data_taxonomy.ListDataAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[data_taxonomy.DataAttribute]: - async def async_generator(): - async for page in self.pages: - for response in page.data_attributes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst deleted file mode 100644 index 5c194fc01362..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataTaxonomyServiceTransport` is the ABC for all transports. -- public child `DataTaxonomyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataTaxonomyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataTaxonomyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataTaxonomyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py deleted file mode 100644 index 4e06f5ff1989..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataTaxonomyServiceTransport -from .grpc import DataTaxonomyServiceGrpcTransport -from .grpc_asyncio import DataTaxonomyServiceGrpcAsyncIOTransport -from .rest import DataTaxonomyServiceRestTransport -from .rest import DataTaxonomyServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataTaxonomyServiceTransport]] -_transport_registry['grpc'] = DataTaxonomyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataTaxonomyServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DataTaxonomyServiceRestTransport - -__all__ = ( - 'DataTaxonomyServiceTransport', - 'DataTaxonomyServiceGrpcTransport', - 'DataTaxonomyServiceGrpcAsyncIOTransport', - 'DataTaxonomyServiceRestTransport', - 'DataTaxonomyServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py deleted file mode 100644 index 2a76beefdeac..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/base.py +++ /dev/null @@ -1,448 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
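# The ``_transport_registry`` above backs name-based lookup. A sketch of the
# public way to resolve a registered transport class (``get_transport_class``
# is assumed here from the generated client's metaclass):
#
#     from google.cloud import dataplex_v1
#
#     cls = dataplex_v1.DataTaxonomyServiceClient.get_transport_class("grpc")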
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataTaxonomyServiceTransport(abc.ABC): - """Abstract transport class for DataTaxonomyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
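# Credential resolution implemented just below: a ``credentials_file`` is
# loaded if given; otherwise an explicit ``credentials`` object is used;
# otherwise Application Default Credentials are fetched. A sketch of the ADC
# fallback, assuming the environment is already configured:
#
#     import google.auth
#
#     credentials, project_id = google.auth.default(
#         scopes=["https://www.googleapis.com/auth/cloud-platform"]
#     )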
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if a credentials file was passed from the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_data_taxonomy: gapic_v1.method.wrap_method( - self.create_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_data_taxonomy: gapic_v1.method.wrap_method( - self.update_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_taxonomy: gapic_v1.method.wrap_method( - self.delete_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_data_taxonomies: gapic_v1.method.wrap_method( - self.list_data_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_data_taxonomy: gapic_v1.method.wrap_method( - self.get_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute_binding: gapic_v1.method.wrap_method( - self.create_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute_binding: gapic_v1.method.wrap_method( - self.update_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute_binding: gapic_v1.method.wrap_method( - self.delete_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attribute_bindings: gapic_v1.method.wrap_method( - self.list_data_attribute_bindings, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute_binding: gapic_v1.method.wrap_method( - self.get_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute: gapic_v1.method.wrap_method( - self.create_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute: gapic_v1.method.wrap_method( - self.update_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute: gapic_v1.method.wrap_method( - self.delete_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attributes: gapic_v1.method.wrap_method( - self.list_data_attributes, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute: gapic_v1.method.wrap_method( - self.get_data_attribute, - default_timeout=None, -
client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - Union[ - data_taxonomy.ListDataTaxonomiesResponse, - Awaitable[data_taxonomy.ListDataTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - Union[ - data_taxonomy.DataTaxonomy, - Awaitable[data_taxonomy.DataTaxonomy] - ]]: - raise NotImplementedError() - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - Union[ - data_taxonomy.ListDataAttributeBindingsResponse, - Awaitable[data_taxonomy.ListDataAttributeBindingsResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - Union[ - data_taxonomy.DataAttributeBinding, - Awaitable[data_taxonomy.DataAttributeBinding] - ]]: - raise 
NotImplementedError() - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - Union[ - data_taxonomy.ListDataAttributesResponse, - Awaitable[data_taxonomy.ListDataAttributesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - Union[ - data_taxonomy.DataAttribute, - Awaitable[data_taxonomy.DataAttribute] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataTaxonomyServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py deleted file mode 100644 index 28127ba1c292..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc.py +++ /dev/null @@ -1,849 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata (key/value tuples) into a dict of strings - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataTaxonomyServiceGrpcTransport(DataTaxonomyServiceTransport): - """gRPC backend transport for DataTaxonomyService.
- - DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests.
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data taxonomy method over gRPC. - - Create a DataTaxonomy resource. - - Returns: - Callable[[~.CreateDataTaxonomyRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_taxonomy' not in self._stubs: - self._stubs['create_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', - request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_taxonomy'] - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data taxonomy method over gRPC. - - Updates a DataTaxonomy resource. - - Returns: - Callable[[~.UpdateDataTaxonomyRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'update_data_taxonomy' not in self._stubs: - self._stubs['update_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', - request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_taxonomy'] - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data taxonomy method over gRPC. - - Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - Returns: - Callable[[~.DeleteDataTaxonomyRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_taxonomy' not in self._stubs: - self._stubs['delete_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', - request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_taxonomy'] - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - data_taxonomy.ListDataTaxonomiesResponse]: - r"""Return a callable for the list data taxonomies method over gRPC. - - Lists DataTaxonomy resources in a project and - location. - - Returns: - Callable[[~.ListDataTaxonomiesRequest], - ~.ListDataTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_taxonomies' not in self._stubs: - self._stubs['list_data_taxonomies'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', - request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, - response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, - ) - return self._stubs['list_data_taxonomies'] - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - data_taxonomy.DataTaxonomy]: - r"""Return a callable for the get data taxonomy method over gRPC. - - Retrieves a DataTaxonomy resource. - - Returns: - Callable[[~.GetDataTaxonomyRequest], - ~.DataTaxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_taxonomy' not in self._stubs: - self._stubs['get_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', - request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, - response_deserializer=data_taxonomy.DataTaxonomy.deserialize, - ) - return self._stubs['get_data_taxonomy'] - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data attribute binding method over gRPC. - - Create a DataAttributeBinding resource. - - Returns: - Callable[[~.CreateDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute_binding' not in self._stubs: - self._stubs['create_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', - request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute_binding'] - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data attribute binding method over gRPC. - - Updates a DataAttributeBinding resource. - - Returns: - Callable[[~.UpdateDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute_binding' not in self._stubs: - self._stubs['update_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', - request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute_binding'] - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data attribute binding method over gRPC. - - Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - Returns: - Callable[[~.DeleteDataAttributeBindingRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_data_attribute_binding' not in self._stubs: - self._stubs['delete_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', - request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute_binding'] - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - data_taxonomy.ListDataAttributeBindingsResponse]: - r"""Return a callable for the list data attribute bindings method over gRPC. - - Lists DataAttributeBinding resources in a project and - location. - - Returns: - Callable[[~.ListDataAttributeBindingsRequest], - ~.ListDataAttributeBindingsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attribute_bindings' not in self._stubs: - self._stubs['list_data_attribute_bindings'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', - request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, - ) - return self._stubs['list_data_attribute_bindings'] - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - data_taxonomy.DataAttributeBinding]: - r"""Return a callable for the get data attribute binding method over gRPC. - - Retrieves a DataAttributeBinding resource. - - Returns: - Callable[[~.GetDataAttributeBindingRequest], - ~.DataAttributeBinding]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_attribute_binding' not in self._stubs: - self._stubs['get_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', - request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, - response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, - ) - return self._stubs['get_data_attribute_binding'] - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create data attribute method over gRPC. - - Create a DataAttribute resource. - - Returns: - Callable[[~.CreateDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_data_attribute' not in self._stubs: - self._stubs['create_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', - request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute'] - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data attribute method over gRPC. - - Updates a DataAttribute resource. - - Returns: - Callable[[~.UpdateDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute' not in self._stubs: - self._stubs['update_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', - request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute'] - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete data attribute method over gRPC. - - Deletes a Data Attribute resource. - - Returns: - Callable[[~.DeleteDataAttributeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_attribute' not in self._stubs: - self._stubs['delete_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', - request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute'] - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - data_taxonomy.ListDataAttributesResponse]: - r"""Return a callable for the list data attributes method over gRPC. - - Lists Data Attribute resources in a DataTaxonomy. - - Returns: - Callable[[~.ListDataAttributesRequest], - ~.ListDataAttributesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attributes' not in self._stubs: - self._stubs['list_data_attributes'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', - request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, - ) - return self._stubs['list_data_attributes'] - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - data_taxonomy.DataAttribute]: - r"""Return a callable for the get data attribute method over gRPC. 
- - Retrieves a Data Attribute resource. - - Returns: - Callable[[~.GetDataAttributeRequest], - ~.DataAttribute]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_attribute' not in self._stubs: - self._stubs['get_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', - request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, - response_deserializer=data_taxonomy.DataAttribute.deserialize, - ) - return self._stubs['get_data_attribute'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataTaxonomyServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py deleted file mode 100644 index cc240458b4cd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,970 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataTaxonomyServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata (key/value tuples) into a dict of strings - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": str(client_call_details.method), -
"response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataTaxonomyServiceGrpcAsyncIOTransport(DataTaxonomyServiceTransport): - """gRPC AsyncIO backend transport for DataTaxonomyService. - - DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data taxonomy method over gRPC. - - Create a DataTaxonomy resource. - - Returns: - Callable[[~.CreateDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_data_taxonomy' not in self._stubs: - self._stubs['create_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataTaxonomy', - request_serializer=gcd_data_taxonomy.CreateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_taxonomy'] - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data taxonomy method over gRPC. - - Updates a DataTaxonomy resource. - - Returns: - Callable[[~.UpdateDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_taxonomy' not in self._stubs: - self._stubs['update_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataTaxonomy', - request_serializer=gcd_data_taxonomy.UpdateDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_taxonomy'] - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data taxonomy method over gRPC. - - Deletes a DataTaxonomy resource. All attributes - within the DataTaxonomy must be deleted before the - DataTaxonomy can be deleted. - - Returns: - Callable[[~.DeleteDataTaxonomyRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_taxonomy' not in self._stubs: - self._stubs['delete_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataTaxonomy', - request_serializer=data_taxonomy.DeleteDataTaxonomyRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_taxonomy'] - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - Awaitable[data_taxonomy.ListDataTaxonomiesResponse]]: - r"""Return a callable for the list data taxonomies method over gRPC. - - Lists DataTaxonomy resources in a project and - location. - - Returns: - Callable[[~.ListDataTaxonomiesRequest], - Awaitable[~.ListDataTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
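- # The returned callable is awaitable, e.g. (hypothetical transport - # instance and request object): - # - #     response = await transport.list_data_taxonomies(request)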
- if 'list_data_taxonomies' not in self._stubs: - self._stubs['list_data_taxonomies'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataTaxonomies', - request_serializer=data_taxonomy.ListDataTaxonomiesRequest.serialize, - response_deserializer=data_taxonomy.ListDataTaxonomiesResponse.deserialize, - ) - return self._stubs['list_data_taxonomies'] - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - Awaitable[data_taxonomy.DataTaxonomy]]: - r"""Return a callable for the get data taxonomy method over gRPC. - - Retrieves a DataTaxonomy resource. - - Returns: - Callable[[~.GetDataTaxonomyRequest], - Awaitable[~.DataTaxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_taxonomy' not in self._stubs: - self._stubs['get_data_taxonomy'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataTaxonomy', - request_serializer=data_taxonomy.GetDataTaxonomyRequest.serialize, - response_deserializer=data_taxonomy.DataTaxonomy.deserialize, - ) - return self._stubs['get_data_taxonomy'] - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data attribute binding method over gRPC. - - Create a DataAttributeBinding resource. - - Returns: - Callable[[~.CreateDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute_binding' not in self._stubs: - self._stubs['create_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttributeBinding', - request_serializer=data_taxonomy.CreateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute_binding'] - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data attribute binding method over gRPC. - - Updates a DataAttributeBinding resource. - - Returns: - Callable[[~.UpdateDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_data_attribute_binding' not in self._stubs: - self._stubs['update_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttributeBinding', - request_serializer=data_taxonomy.UpdateDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute_binding'] - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data attribute binding method over gRPC. - - Deletes a DataAttributeBinding resource. All - attributes within the DataAttributeBinding must be - deleted before the DataAttributeBinding can be deleted. - - Returns: - Callable[[~.DeleteDataAttributeBindingRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_attribute_binding' not in self._stubs: - self._stubs['delete_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttributeBinding', - request_serializer=data_taxonomy.DeleteDataAttributeBindingRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute_binding'] - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - Awaitable[data_taxonomy.ListDataAttributeBindingsResponse]]: - r"""Return a callable for the list data attribute bindings method over gRPC. - - Lists DataAttributeBinding resources in a project and - location. - - Returns: - Callable[[~.ListDataAttributeBindingsRequest], - Awaitable[~.ListDataAttributeBindingsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attribute_bindings' not in self._stubs: - self._stubs['list_data_attribute_bindings'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributeBindings', - request_serializer=data_taxonomy.ListDataAttributeBindingsRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributeBindingsResponse.deserialize, - ) - return self._stubs['list_data_attribute_bindings'] - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - Awaitable[data_taxonomy.DataAttributeBinding]]: - r"""Return a callable for the get data attribute binding method over gRPC. - - Retrieves a DataAttributeBinding resource. - - Returns: - Callable[[~.GetDataAttributeBindingRequest], - Awaitable[~.DataAttributeBinding]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_attribute_binding' not in self._stubs: - self._stubs['get_data_attribute_binding'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttributeBinding', - request_serializer=data_taxonomy.GetDataAttributeBindingRequest.serialize, - response_deserializer=data_taxonomy.DataAttributeBinding.deserialize, - ) - return self._stubs['get_data_attribute_binding'] - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create data attribute method over gRPC. - - Create a DataAttribute resource. - - Returns: - Callable[[~.CreateDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_attribute' not in self._stubs: - self._stubs['create_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/CreateDataAttribute', - request_serializer=data_taxonomy.CreateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_data_attribute'] - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data attribute method over gRPC. - - Updates a DataAttribute resource. - - Returns: - Callable[[~.UpdateDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_attribute' not in self._stubs: - self._stubs['update_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/UpdateDataAttribute', - request_serializer=data_taxonomy.UpdateDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_attribute'] - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete data attribute method over gRPC. - - Deletes a Data Attribute resource. - - Returns: - Callable[[~.DeleteDataAttributeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
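- # Note that this stub resolves to a google.longrunning Operation; - # callers typically poll it to completion through the - # operations_client property defined above.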
- if 'delete_data_attribute' not in self._stubs: - self._stubs['delete_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/DeleteDataAttribute', - request_serializer=data_taxonomy.DeleteDataAttributeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_data_attribute'] - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - Awaitable[data_taxonomy.ListDataAttributesResponse]]: - r"""Return a callable for the list data attributes method over gRPC. - - Lists Data Attribute resources in a DataTaxonomy. - - Returns: - Callable[[~.ListDataAttributesRequest], - Awaitable[~.ListDataAttributesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_attributes' not in self._stubs: - self._stubs['list_data_attributes'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/ListDataAttributes', - request_serializer=data_taxonomy.ListDataAttributesRequest.serialize, - response_deserializer=data_taxonomy.ListDataAttributesResponse.deserialize, - ) - return self._stubs['list_data_attributes'] - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - Awaitable[data_taxonomy.DataAttribute]]: - r"""Return a callable for the get data attribute method over gRPC. - - Retrieves a Data Attribute resource. - - Returns: - Callable[[~.GetDataAttributeRequest], - Awaitable[~.DataAttribute]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_data_attribute' not in self._stubs: - self._stubs['get_data_attribute'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataTaxonomyService/GetDataAttribute', - request_serializer=data_taxonomy.GetDataAttributeRequest.serialize, - response_deserializer=data_taxonomy.DataAttribute.deserialize, - ) - return self._stubs['get_data_attribute'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_taxonomy: self._wrap_method( - self.create_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_data_taxonomy: self._wrap_method( - self.update_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_taxonomy: self._wrap_method( - self.delete_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_data_taxonomies: self._wrap_method( - self.list_data_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_data_taxonomy: self._wrap_method( - self.get_data_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute_binding: self._wrap_method( - self.create_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute_binding: self._wrap_method( - self.update_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute_binding: self._wrap_method( - self.delete_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attribute_bindings: self._wrap_method( - self.list_data_attribute_bindings, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute_binding: self._wrap_method( - self.get_data_attribute_binding, - default_timeout=None, - client_info=client_info, - ), - self.create_data_attribute: self._wrap_method( - self.create_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.update_data_attribute: self._wrap_method( - self.update_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_attribute: self._wrap_method( - self.delete_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.list_data_attributes: self._wrap_method( - self.list_data_attributes, - default_timeout=None, - client_info=client_info, - ), - self.get_data_attribute: self._wrap_method( - self.get_data_attribute, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def 
close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
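- # These mixin stubs serialize with the raw protobuf - # SerializeToString/FromString helpers because locations_pb2 and - # operations_pb2 messages are plain protobuf types rather than the - # proto-plus types used by the service methods above.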
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataTaxonomyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py deleted file mode 100644 index 34e996b419df..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py +++ /dev/null @@ -1,3660 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseDataTaxonomyServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataTaxonomyServiceRestInterceptor: - """Interceptor for DataTaxonomyService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DataTaxonomyServiceRestTransport. - - .. 
code-block:: python - class MyCustomDataTaxonomyServiceInterceptor(DataTaxonomyServiceRestInterceptor): - def pre_create_data_attribute(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_create_data_attribute(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_create_data_attribute_binding(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_create_data_attribute_binding(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_create_data_taxonomy(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_create_data_taxonomy(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_delete_data_attribute(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_delete_data_attribute(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_delete_data_attribute_binding(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_delete_data_attribute_binding(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_delete_data_taxonomy(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_delete_data_taxonomy(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_get_data_attribute(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_get_data_attribute(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_get_data_attribute_binding(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_get_data_attribute_binding(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_get_data_taxonomy(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_get_data_taxonomy(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_list_data_attribute_bindings(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_list_data_attribute_bindings(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_list_data_attributes(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_list_data_attributes(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_list_data_taxonomies(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_list_data_taxonomies(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_update_data_attribute(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_update_data_attribute(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_update_data_attribute_binding(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def 
post_update_data_attribute_binding(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - def pre_update_data_taxonomy(self, request, metadata): - logging.log(logging.INFO, f"Received request: {request}") - return request, metadata - - def post_update_data_taxonomy(self, response): - logging.log(logging.INFO, f"Received response: {response}") - return response - - transport = DataTaxonomyServiceRestTransport(interceptor=MyCustomDataTaxonomyServiceInterceptor()) - client = DataTaxonomyServiceClient(transport=transport) - - - """ - def pre_create_data_attribute(self, request: data_taxonomy.CreateDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.CreateDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_data_attribute - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_create_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_data_attribute - - DEPRECATED. Please use the `post_create_data_attribute_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_create_data_attribute` interceptor runs - before the `post_create_data_attribute_with_metadata` interceptor. - """ - return response - - def post_create_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_data_attribute - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_create_data_attribute_with_metadata` - interceptor in new development instead of the `post_create_data_attribute` interceptor. - When both interceptors are used, this `post_create_data_attribute_with_metadata` interceptor runs after the - `post_create_data_attribute` interceptor. The (possibly modified) response returned by - `post_create_data_attribute` will be passed to - `post_create_data_attribute_with_metadata`. - """ - return response, metadata - - def pre_create_data_attribute_binding(self, request: data_taxonomy.CreateDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.CreateDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_data_attribute_binding - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_create_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_data_attribute_binding - - DEPRECATED. Please use the `post_create_data_attribute_binding_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_create_data_attribute_binding` interceptor runs - before the `post_create_data_attribute_binding_with_metadata` interceptor.
- """ - return response - - def post_create_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_data_attribute_binding - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_create_data_attribute_binding_with_metadata` - interceptor in new development instead of the `post_create_data_attribute_binding` interceptor. - When both interceptors are used, this `post_create_data_attribute_binding_with_metadata` interceptor runs after the - `post_create_data_attribute_binding` interceptor. The (possibly modified) response returned by - `post_create_data_attribute_binding` will be passed to - `post_create_data_attribute_binding_with_metadata`. - """ - return response, metadata - - def pre_create_data_taxonomy(self, request: gcd_data_taxonomy.CreateDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_data_taxonomy.CreateDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_data_taxonomy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_create_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_data_taxonomy - - DEPRECATED. Please use the `post_create_data_taxonomy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_create_data_taxonomy` interceptor runs - before the `post_create_data_taxonomy_with_metadata` interceptor. - """ - return response - - def post_create_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_data_taxonomy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_create_data_taxonomy_with_metadata` - interceptor in new development instead of the `post_create_data_taxonomy` interceptor. - When both interceptors are used, this `post_create_data_taxonomy_with_metadata` interceptor runs after the - `post_create_data_taxonomy` interceptor. The (possibly modified) response returned by - `post_create_data_taxonomy` will be passed to - `post_create_data_taxonomy_with_metadata`. - """ - return response, metadata - - def pre_delete_data_attribute(self, request: data_taxonomy.DeleteDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_data_attribute - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. 
- """ - return request, metadata - - def post_delete_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_data_attribute - - DEPRECATED. Please use the `post_delete_data_attribute_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_delete_data_attribute` interceptor runs - before the `post_delete_data_attribute_with_metadata` interceptor. - """ - return response - - def post_delete_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_data_attribute - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_delete_data_attribute_with_metadata` - interceptor in new development instead of the `post_delete_data_attribute` interceptor. - When both interceptors are used, this `post_delete_data_attribute_with_metadata` interceptor runs after the - `post_delete_data_attribute` interceptor. The (possibly modified) response returned by - `post_delete_data_attribute` will be passed to - `post_delete_data_attribute_with_metadata`. - """ - return response, metadata - - def pre_delete_data_attribute_binding(self, request: data_taxonomy.DeleteDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_data_attribute_binding - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_delete_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_data_attribute_binding - - DEPRECATED. Please use the `post_delete_data_attribute_binding_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_delete_data_attribute_binding` interceptor runs - before the `post_delete_data_attribute_binding_with_metadata` interceptor. - """ - return response - - def post_delete_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_data_attribute_binding - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_delete_data_attribute_binding_with_metadata` - interceptor in new development instead of the `post_delete_data_attribute_binding` interceptor. - When both interceptors are used, this `post_delete_data_attribute_binding_with_metadata` interceptor runs after the - `post_delete_data_attribute_binding` interceptor. 
The (possibly modified) response returned by - `post_delete_data_attribute_binding` will be passed to - `post_delete_data_attribute_binding_with_metadata`. - """ - return response, metadata - - def pre_delete_data_taxonomy(self, request: data_taxonomy.DeleteDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DeleteDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_data_taxonomy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_delete_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_data_taxonomy - - DEPRECATED. Please use the `post_delete_data_taxonomy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_delete_data_taxonomy` interceptor runs - before the `post_delete_data_taxonomy_with_metadata` interceptor. - """ - return response - - def post_delete_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_data_taxonomy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_delete_data_taxonomy_with_metadata` - interceptor in new development instead of the `post_delete_data_taxonomy` interceptor. - When both interceptors are used, this `post_delete_data_taxonomy_with_metadata` interceptor runs after the - `post_delete_data_taxonomy` interceptor. The (possibly modified) response returned by - `post_delete_data_taxonomy` will be passed to - `post_delete_data_taxonomy_with_metadata`. - """ - return response, metadata - - def pre_get_data_attribute(self, request: data_taxonomy.GetDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_attribute - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_get_data_attribute(self, response: data_taxonomy.DataAttribute) -> data_taxonomy.DataAttribute: - """Post-rpc interceptor for get_data_attribute - - DEPRECATED. Please use the `post_get_data_attribute_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_get_data_attribute` interceptor runs - before the `post_get_data_attribute_with_metadata` interceptor. - """ - return response - - def post_get_data_attribute_with_metadata(self, response: data_taxonomy.DataAttribute, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataAttribute, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_attribute - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. 
- - We recommend only using this `post_get_data_attribute_with_metadata` - interceptor in new development instead of the `post_get_data_attribute` interceptor. - When both interceptors are used, this `post_get_data_attribute_with_metadata` interceptor runs after the - `post_get_data_attribute` interceptor. The (possibly modified) response returned by - `post_get_data_attribute` will be passed to - `post_get_data_attribute_with_metadata`. - """ - return response, metadata - - def pre_get_data_attribute_binding(self, request: data_taxonomy.GetDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_attribute_binding - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_get_data_attribute_binding(self, response: data_taxonomy.DataAttributeBinding) -> data_taxonomy.DataAttributeBinding: - """Post-rpc interceptor for get_data_attribute_binding - - DEPRECATED. Please use the `post_get_data_attribute_binding_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_get_data_attribute_binding` interceptor runs - before the `post_get_data_attribute_binding_with_metadata` interceptor. - """ - return response - - def post_get_data_attribute_binding_with_metadata(self, response: data_taxonomy.DataAttributeBinding, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataAttributeBinding, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_attribute_binding - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_get_data_attribute_binding_with_metadata` - interceptor in new development instead of the `post_get_data_attribute_binding` interceptor. - When both interceptors are used, this `post_get_data_attribute_binding_with_metadata` interceptor runs after the - `post_get_data_attribute_binding` interceptor. The (possibly modified) response returned by - `post_get_data_attribute_binding` will be passed to - `post_get_data_attribute_binding_with_metadata`. - """ - return response, metadata - - def pre_get_data_taxonomy(self, request: data_taxonomy.GetDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.GetDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_taxonomy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_get_data_taxonomy(self, response: data_taxonomy.DataTaxonomy) -> data_taxonomy.DataTaxonomy: - """Post-rpc interceptor for get_data_taxonomy - - DEPRECATED. Please use the `post_get_data_taxonomy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_get_data_taxonomy` interceptor runs - before the `post_get_data_taxonomy_with_metadata` interceptor. 
- """ - return response - - def post_get_data_taxonomy_with_metadata(self, response: data_taxonomy.DataTaxonomy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.DataTaxonomy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_taxonomy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_get_data_taxonomy_with_metadata` - interceptor in new development instead of the `post_get_data_taxonomy` interceptor. - When both interceptors are used, this `post_get_data_taxonomy_with_metadata` interceptor runs after the - `post_get_data_taxonomy` interceptor. The (possibly modified) response returned by - `post_get_data_taxonomy` will be passed to - `post_get_data_taxonomy_with_metadata`. - """ - return response, metadata - - def pre_list_data_attribute_bindings(self, request: data_taxonomy.ListDataAttributeBindingsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributeBindingsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_attribute_bindings - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_list_data_attribute_bindings(self, response: data_taxonomy.ListDataAttributeBindingsResponse) -> data_taxonomy.ListDataAttributeBindingsResponse: - """Post-rpc interceptor for list_data_attribute_bindings - - DEPRECATED. Please use the `post_list_data_attribute_bindings_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_list_data_attribute_bindings` interceptor runs - before the `post_list_data_attribute_bindings_with_metadata` interceptor. - """ - return response - - def post_list_data_attribute_bindings_with_metadata(self, response: data_taxonomy.ListDataAttributeBindingsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributeBindingsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_attribute_bindings - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_list_data_attribute_bindings_with_metadata` - interceptor in new development instead of the `post_list_data_attribute_bindings` interceptor. - When both interceptors are used, this `post_list_data_attribute_bindings_with_metadata` interceptor runs after the - `post_list_data_attribute_bindings` interceptor. The (possibly modified) response returned by - `post_list_data_attribute_bindings` will be passed to - `post_list_data_attribute_bindings_with_metadata`. - """ - return response, metadata - - def pre_list_data_attributes(self, request: data_taxonomy.ListDataAttributesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_attributes - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. 
- """ - return request, metadata - - def post_list_data_attributes(self, response: data_taxonomy.ListDataAttributesResponse) -> data_taxonomy.ListDataAttributesResponse: - """Post-rpc interceptor for list_data_attributes - - DEPRECATED. Please use the `post_list_data_attributes_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_list_data_attributes` interceptor runs - before the `post_list_data_attributes_with_metadata` interceptor. - """ - return response - - def post_list_data_attributes_with_metadata(self, response: data_taxonomy.ListDataAttributesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataAttributesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_attributes - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_list_data_attributes_with_metadata` - interceptor in new development instead of the `post_list_data_attributes` interceptor. - When both interceptors are used, this `post_list_data_attributes_with_metadata` interceptor runs after the - `post_list_data_attributes` interceptor. The (possibly modified) response returned by - `post_list_data_attributes` will be passed to - `post_list_data_attributes_with_metadata`. - """ - return response, metadata - - def pre_list_data_taxonomies(self, request: data_taxonomy.ListDataTaxonomiesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataTaxonomiesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_taxonomies - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_list_data_taxonomies(self, response: data_taxonomy.ListDataTaxonomiesResponse) -> data_taxonomy.ListDataTaxonomiesResponse: - """Post-rpc interceptor for list_data_taxonomies - - DEPRECATED. Please use the `post_list_data_taxonomies_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_list_data_taxonomies` interceptor runs - before the `post_list_data_taxonomies_with_metadata` interceptor. - """ - return response - - def post_list_data_taxonomies_with_metadata(self, response: data_taxonomy.ListDataTaxonomiesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.ListDataTaxonomiesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_taxonomies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_list_data_taxonomies_with_metadata` - interceptor in new development instead of the `post_list_data_taxonomies` interceptor. - When both interceptors are used, this `post_list_data_taxonomies_with_metadata` interceptor runs after the - `post_list_data_taxonomies` interceptor. The (possibly modified) response returned by - `post_list_data_taxonomies` will be passed to - `post_list_data_taxonomies_with_metadata`. 
- """ - return response, metadata - - def pre_update_data_attribute(self, request: data_taxonomy.UpdateDataAttributeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.UpdateDataAttributeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_attribute - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_update_data_attribute(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_attribute - - DEPRECATED. Please use the `post_update_data_attribute_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_update_data_attribute` interceptor runs - before the `post_update_data_attribute_with_metadata` interceptor. - """ - return response - - def post_update_data_attribute_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_attribute - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_update_data_attribute_with_metadata` - interceptor in new development instead of the `post_update_data_attribute` interceptor. - When both interceptors are used, this `post_update_data_attribute_with_metadata` interceptor runs after the - `post_update_data_attribute` interceptor. The (possibly modified) response returned by - `post_update_data_attribute` will be passed to - `post_update_data_attribute_with_metadata`. - """ - return response, metadata - - def pre_update_data_attribute_binding(self, request: data_taxonomy.UpdateDataAttributeBindingRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[data_taxonomy.UpdateDataAttributeBindingRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_attribute_binding - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_update_data_attribute_binding(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_attribute_binding - - DEPRECATED. Please use the `post_update_data_attribute_binding_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_update_data_attribute_binding` interceptor runs - before the `post_update_data_attribute_binding_with_metadata` interceptor. - """ - return response - - def post_update_data_attribute_binding_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_attribute_binding - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. 
- - We recommend only using this `post_update_data_attribute_binding_with_metadata` - interceptor in new development instead of the `post_update_data_attribute_binding` interceptor. - When both interceptors are used, this `post_update_data_attribute_binding_with_metadata` interceptor runs after the - `post_update_data_attribute_binding` interceptor. The (possibly modified) response returned by - `post_update_data_attribute_binding` will be passed to - `post_update_data_attribute_binding_with_metadata`. - """ - return response, metadata - - def pre_update_data_taxonomy(self, request: gcd_data_taxonomy.UpdateDataTaxonomyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcd_data_taxonomy.UpdateDataTaxonomyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_taxonomy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_update_data_taxonomy(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_taxonomy - - DEPRECATED. Please use the `post_update_data_taxonomy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. This `post_update_data_taxonomy` interceptor runs - before the `post_update_data_taxonomy_with_metadata` interceptor. - """ - return response - - def post_update_data_taxonomy_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_taxonomy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTaxonomyService server but before it is returned to user code. - - We recommend only using this `post_update_data_taxonomy_with_metadata` - interceptor in new development instead of the `post_update_data_taxonomy` interceptor. - When both interceptors are used, this `post_update_data_taxonomy_with_metadata` interceptor runs after the - `post_update_data_taxonomy` interceptor. The (possibly modified) response returned by - `post_update_data_taxonomy` will be passed to - `post_update_data_taxonomy_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. 
- """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTaxonomyService server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DataTaxonomyService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DataTaxonomyServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DataTaxonomyServiceRestInterceptor - - -class DataTaxonomyServiceRestTransport(_BaseDataTaxonomyServiceRestTransport): - """REST backend synchronous transport for DataTaxonomyService. - - DataTaxonomyService enables attribute-based governance. The - resources currently offered include DataTaxonomy and - DataAttribute. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DataTaxonomyServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DataTaxonomyServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.CreateDataAttribute") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: data_taxonomy.CreateDataAttributeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create data attribute method over HTTP. 
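A sketch of putting the constructor to use; `AuditingInterceptor` is the hypothetical subclass from the earlier sketch, and credentials fall back to the environment as the docstring describes:

from google.cloud import dataplex_v1

# Wire a custom interceptor into the synchronous REST transport.
transport = DataTaxonomyServiceRestTransport(
    host="dataplex.googleapis.com",
    interceptor=AuditingInterceptor(),
)
client = dataplex_v1.DataTaxonomyServiceClient(transport=transport)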
-    class _CreateDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute, DataTaxonomyServiceRestStub):
-        def __hash__(self):
-            return hash("DataTaxonomyServiceRestTransport.CreateDataAttribute")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-                )
-            return response
-
-        def __call__(self,
-                request: data_taxonomy.CreateDataAttributeRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the create data attribute method over HTTP.
-
-            Args:
-                request (~.data_taxonomy.CreateDataAttributeRequest):
-                    The request object. Create DataAttribute request.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                    long-running operation that is the
-                    result of a network API call.
-
-            """
-
-            http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_http_options()
-
-            request, metadata = self._interceptor.pre_create_data_attribute(request, metadata)
-            transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_transcoded_request(http_options, request)
-
-            body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataAttribute",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
-                        "rpcName": "CreateDataAttribute",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = DataTaxonomyServiceRestTransport._CreateDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = operations_pb2.Operation()
-            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_create_data_attribute(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_create_data_attribute_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService",
-                        "rpcName": "CreateDataAttribute",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
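At the transport level this call returns a raw `operations_pb2.Operation`; through the client it comes back wrapped so the terminal resource can be awaited. A sketch with placeholder resource names:

from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient(transport="rest")
operation = client.create_data_attribute(
    parent="projects/my-project/locations/us-central1/dataTaxonomies/my-taxonomy",
    data_attribute=dataplex_v1.DataAttribute(description="PII attribute"),
    data_attribute_id="pii",
)
attribute = operation.result()  # polls via the operations_client above
print(attribute.name)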
- - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_http_options() - - request, metadata = self._interceptor.pre_create_data_attribute_binding(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataAttributeBinding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "CreateDataAttributeBinding", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._CreateDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_data_attribute_binding(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_data_attribute_binding_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "CreateDataAttributeBinding", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.CreateDataTaxonomy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return 
response - - def __call__(self, - request: gcd_data_taxonomy.CreateDataTaxonomyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create data taxonomy method over HTTP. - - Args: - request (~.gcd_data_taxonomy.CreateDataTaxonomyRequest): - The request object. Create DataTaxonomy request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_http_options() - - request, metadata = self._interceptor.pre_create_data_taxonomy(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CreateDataTaxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "CreateDataTaxonomy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._CreateDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_data_taxonomy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_data_taxonomy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "CreateDataTaxonomy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.DeleteDataAttribute") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.DeleteDataAttributeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete data attribute method over HTTP. - - Args: - request (~.data_taxonomy.DeleteDataAttributeRequest): - The request object. Delete DataAttribute request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
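These debug blocks are gated on the module logger being enabled for DEBUG. Plain logging configuration turns them on; recent google-api-core releases are also expected to honor a GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable, though that is version-dependent:

import logging

logging.basicConfig(level=logging.INFO)
# Enable the structured request/response records emitted above.
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)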
- - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_http_options() - - request, metadata = self._interceptor.pre_delete_data_attribute(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataAttribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataAttribute", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._DeleteDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_data_attribute(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_data_attribute_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataAttribute", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.DeleteDataAttributeBinding") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.DeleteDataAttributeBindingRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete data attribute - binding method over HTTP. - - Args: - request (~.data_taxonomy.DeleteDataAttributeBindingRequest): - The request object. Delete DataAttributeBinding request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_http_options() - - request, metadata = self._interceptor.pre_delete_data_attribute_binding(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataAttributeBinding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataAttributeBinding", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._DeleteDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_data_attribute_binding(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_data_attribute_binding_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataAttributeBinding", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.DeleteDataTaxonomy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.DeleteDataTaxonomyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete data taxonomy method over HTTP. - - Args: - request (~.data_taxonomy.DeleteDataTaxonomyRequest): - The request object. Delete DataTaxonomy request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
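The metadata convention in these docstrings (str values, bytes only for keys suffixed `-bin`) applies to every call; a sketch with illustrative header names and placeholder resource values:

from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient(transport="rest")
operation = client.delete_data_attribute_binding(
    request=dataplex_v1.DeleteDataAttributeBindingRequest(
        name="projects/my-project/locations/us-central1/dataAttributeBindings/my-binding",
        etag="etag-value",
    ),
    metadata=[
        ("x-example-trace", "run-42"),          # str value
        ("x-example-trace-bin", b"\x01\x02"),   # bytes value for a -bin key
    ],
)
operation.result()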
- - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_http_options() - - request, metadata = self._interceptor.pre_delete_data_taxonomy(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteDataTaxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataTaxonomy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._DeleteDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_data_taxonomy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_data_taxonomy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteDataTaxonomy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.GetDataAttribute") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.GetDataAttributeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
data_taxonomy.DataAttribute: - r"""Call the get data attribute method over HTTP. - - Args: - request (~.data_taxonomy.GetDataAttributeRequest): - The request object. Get DataAttribute request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.DataAttribute: - Denotes one dataAttribute in a dataTaxonomy, for - example, PII. DataAttribute resources can be defined in - a hierarchy. A single dataAttribute resource can contain - specs of multiple types - - :: - - PII - - ResourceAccessSpec : - - readers :foo@bar.com - - DataAccessSpec : - - readers :bar@foo.com - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_http_options() - - request, metadata = self._interceptor.pre_get_data_attribute(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataAttribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataAttribute", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._GetDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.DataAttribute() - pb_resp = data_taxonomy.DataAttribute.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_attribute(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_attribute_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.DataAttribute.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataAttribute", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.GetDataAttributeBinding") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.GetDataAttributeBindingRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> data_taxonomy.DataAttributeBinding: - r"""Call the get data attribute - binding method over HTTP. - - Args: - request (~.data_taxonomy.GetDataAttributeBindingRequest): - The request object. Get DataAttributeBinding request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.DataAttributeBinding: - DataAttributeBinding represents - binding of attributes to resources. Eg: - Bind 'CustomerInfo' entity with 'PII' - attribute. 
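A sketch of the corresponding client call; the resource name is a placeholder, and parent_id reflects the attribute hierarchy described above:

from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient(transport="rest")
attribute = client.get_data_attribute(
    name="projects/my-project/locations/us-central1/dataTaxonomies/my-taxonomy/attributes/pii"
)
# parent_id is set when this attribute sits under another attribute.
print(attribute.display_name, attribute.parent_id)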
- - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_http_options() - - request, metadata = self._interceptor.pre_get_data_attribute_binding(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataAttributeBinding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataAttributeBinding", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._GetDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.DataAttributeBinding() - pb_resp = data_taxonomy.DataAttributeBinding.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_attribute_binding(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_attribute_binding_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.DataAttributeBinding.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataAttributeBinding", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.GetDataTaxonomy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.GetDataTaxonomyRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> data_taxonomy.DataTaxonomy: - r"""Call the get data taxonomy method over HTTP. - - Args: - request (~.data_taxonomy.GetDataTaxonomyRequest): - The request object. Get DataTaxonomy request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.DataTaxonomy: - DataTaxonomy represents a set of - hierarchical DataAttributes resources, - grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have - attributes to manage PII data. It is - defined at project level. - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_http_options() - - request, metadata = self._interceptor.pre_get_data_taxonomy(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetDataTaxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataTaxonomy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._GetDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.DataTaxonomy() - pb_resp = data_taxonomy.DataTaxonomy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_taxonomy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_taxonomy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.DataTaxonomy.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetDataTaxonomy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataAttributeBindings(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.ListDataAttributeBindings") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.ListDataAttributeBindingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> data_taxonomy.ListDataAttributeBindingsResponse: - r"""Call the list data attribute - bindings method over HTTP. - - Args: - request (~.data_taxonomy.ListDataAttributeBindingsRequest): - The request object. List DataAttributeBindings request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.ListDataAttributeBindingsResponse: - List DataAttributeBindings response. 
- """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_http_options() - - request, metadata = self._interceptor.pre_list_data_attribute_bindings(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataAttributeBindings", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataAttributeBindings", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._ListDataAttributeBindings._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.ListDataAttributeBindingsResponse() - pb_resp = data_taxonomy.ListDataAttributeBindingsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_attribute_bindings(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_attribute_bindings_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.ListDataAttributeBindingsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataAttributeBindings", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataAttributes(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.ListDataAttributes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - 
request: data_taxonomy.ListDataAttributesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> data_taxonomy.ListDataAttributesResponse: - r"""Call the list data attributes method over HTTP. - - Args: - request (~.data_taxonomy.ListDataAttributesRequest): - The request object. List DataAttributes request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.ListDataAttributesResponse: - List DataAttributes response. - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_http_options() - - request, metadata = self._interceptor.pre_list_data_attributes(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataAttributes", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataAttributes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._ListDataAttributes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.ListDataAttributesResponse() - pb_resp = data_taxonomy.ListDataAttributesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_attributes(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_attributes_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.ListDataAttributesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataAttributes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataTaxonomies(_BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.ListDataTaxonomies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: data_taxonomy.ListDataTaxonomiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> data_taxonomy.ListDataTaxonomiesResponse: - r"""Call the list data taxonomies method over HTTP. - - Args: - request (~.data_taxonomy.ListDataTaxonomiesRequest): - The request object. List DataTaxonomies request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.data_taxonomy.ListDataTaxonomiesResponse: - List DataTaxonomies response. 
- """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_http_options() - - request, metadata = self._interceptor.pre_list_data_taxonomies(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListDataTaxonomies", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataTaxonomies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._ListDataTaxonomies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = data_taxonomy.ListDataTaxonomiesResponse() - pb_resp = data_taxonomy.ListDataTaxonomiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_taxonomies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_taxonomies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = data_taxonomy.ListDataTaxonomiesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListDataTaxonomies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataAttribute(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.UpdateDataAttribute") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: data_taxonomy.UpdateDataAttributeRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update data attribute method over HTTP. - - Args: - request (~.data_taxonomy.UpdateDataAttributeRequest): - The request object. Update DataAttribute request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_http_options() - - request, metadata = self._interceptor.pre_update_data_attribute(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataAttribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataAttribute", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._UpdateDataAttribute._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_attribute(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_attribute_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataAttribute", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataAttributeBinding(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.UpdateDataAttributeBinding") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: data_taxonomy.UpdateDataAttributeBindingRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update data attribute - binding method over HTTP. - - Args: - request (~.data_taxonomy.UpdateDataAttributeBindingRequest): - The request object. Update DataAttributeBinding request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_http_options() - - request, metadata = self._interceptor.pre_update_data_attribute_binding(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataAttributeBinding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataAttributeBinding", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._UpdateDataAttributeBinding._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_attribute_binding(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_attribute_binding_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataAttributeBinding", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataTaxonomy(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.UpdateDataTaxonomy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return 
response - - def __call__(self, - request: gcd_data_taxonomy.UpdateDataTaxonomyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update data taxonomy method over HTTP. - - Args: - request (~.gcd_data_taxonomy.UpdateDataTaxonomyRequest): - The request object. Update DataTaxonomy request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_http_options() - - request, metadata = self._interceptor.pre_update_data_taxonomy(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.UpdateDataTaxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataTaxonomy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._UpdateDataTaxonomy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_taxonomy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_taxonomy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "UpdateDataTaxonomy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_data_attribute(self) -> Callable[ - [data_taxonomy.CreateDataAttributeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataAttribute(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_data_attribute_binding(self) -> Callable[ - [data_taxonomy.CreateDataAttributeBindingRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.CreateDataTaxonomyRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_data_attribute(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDataAttribute(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_data_attribute_binding(self) -> Callable[ - [data_taxonomy.DeleteDataAttributeBindingRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_data_taxonomy(self) -> Callable[ - [data_taxonomy.DeleteDataTaxonomyRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_attribute(self) -> Callable[ - [data_taxonomy.GetDataAttributeRequest], - data_taxonomy.DataAttribute]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataAttribute(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_attribute_binding(self) -> Callable[ - [data_taxonomy.GetDataAttributeBindingRequest], - data_taxonomy.DataAttributeBinding]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_taxonomy(self) -> Callable[ - [data_taxonomy.GetDataTaxonomyRequest], - data_taxonomy.DataTaxonomy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_attribute_bindings(self) -> Callable[ - [data_taxonomy.ListDataAttributeBindingsRequest], - data_taxonomy.ListDataAttributeBindingsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataAttributeBindings(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_attributes(self) -> Callable[ - [data_taxonomy.ListDataAttributesRequest], - data_taxonomy.ListDataAttributesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataAttributes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_taxonomies(self) -> Callable[ - [data_taxonomy.ListDataTaxonomiesRequest], - data_taxonomy.ListDataTaxonomiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataTaxonomies(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_attribute(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDataAttribute(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_attribute_binding(self) -> Callable[ - [data_taxonomy.UpdateDataAttributeBindingRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDataAttributeBinding(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_taxonomy(self) -> Callable[ - [gcd_data_taxonomy.UpdateDataTaxonomyRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateDataTaxonomy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseDataTaxonomyServiceRestTransport._BaseGetLocation, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseDataTaxonomyServiceRestTransport._BaseListLocations, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseDataTaxonomyServiceRestTransport._BaseCancelOperation, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseDataTaxonomyServiceRestTransport._BaseGetOperation, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseDataTaxonomyServiceRestTransport._BaseListOperations, DataTaxonomyServiceRestStub): - def __hash__(self): - return hash("DataTaxonomyServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTaxonomyServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataTaxonomyServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTaxonomyServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataTaxonomyService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DataTaxonomyServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py deleted file mode 100644 index 055bf23dd05a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py +++ /dev/null @@ -1,883 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import DataTaxonomyServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import data_taxonomy -from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseDataTaxonomyServiceRestTransport(DataTaxonomyServiceTransport): - """Base REST backend transport for DataTaxonomyService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateDataAttribute: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "dataAttributeId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes', - 'body': 'data_attribute', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.CreateDataAttributeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttribute._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDataAttributeBinding: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "dataAttributeBindingId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/dataAttributeBindings', - 'body': 'data_attribute_binding', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.CreateDataAttributeBindingRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataAttributeBinding._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDataTaxonomy: - def 
__hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "dataTaxonomyId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/dataTaxonomies', - 'body': 'data_taxonomy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcd_data_taxonomy.CreateDataTaxonomyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseCreateDataTaxonomy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataAttribute: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.DeleteDataAttributeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttribute._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataAttributeBinding: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "etag" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/dataAttributeBindings/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.DeleteDataAttributeBindingRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataAttributeBinding._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataTaxonomy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.DeleteDataTaxonomyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseDeleteDataTaxonomy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataAttribute: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.GetDataAttributeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttribute._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataAttributeBinding: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataAttributeBindings/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.GetDataAttributeBindingRequest.pb(request) - transcoded_request = 
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataAttributeBinding._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataTaxonomy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataTaxonomies/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.GetDataTaxonomyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseGetDataTaxonomy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataAttributeBindings: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dataAttributeBindings', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.ListDataAttributeBindingsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributeBindings._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataAttributes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, 
request): - pb_request = data_taxonomy.ListDataAttributesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataAttributes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataTaxonomies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dataTaxonomies', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.ListDataTaxonomiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseListDataTaxonomies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataAttribute: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_attribute.name=projects/*/locations/*/dataTaxonomies/*/attributes/*}', - 'body': 'data_attribute', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.UpdateDataAttributeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttribute._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataAttributeBinding: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_attribute_binding.name=projects/*/locations/*/dataAttributeBindings/*}', - 'body': 'data_attribute_binding', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = data_taxonomy.UpdateDataAttributeBindingRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataAttributeBinding._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataTaxonomy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_taxonomy.name=projects/*/locations/*/dataTaxonomies/*}', - 'body': 'data_taxonomy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcd_data_taxonomy.UpdateDataTaxonomyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTaxonomyServiceRestTransport._BaseUpdateDataTaxonomy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ 
must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', 
- }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDataTaxonomyServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py deleted file mode 100644 index e865ac090fc7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataplexServiceClient -from .async_client import DataplexServiceAsyncClient - -__all__ = ( - 'DataplexServiceClient', - 'DataplexServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py deleted file mode 100644 index 7abf8f9d4836..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ /dev/null @@ -1,4716 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.dataplex_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport -from .client import DataplexServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataplexServiceAsyncClient: - """Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - """ - - _client: DataplexServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = DataplexServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataplexServiceClient._DEFAULT_UNIVERSE - - action_path = staticmethod(DataplexServiceClient.action_path) - parse_action_path = staticmethod(DataplexServiceClient.parse_action_path) - asset_path = staticmethod(DataplexServiceClient.asset_path) - parse_asset_path = staticmethod(DataplexServiceClient.parse_asset_path) - environment_path = staticmethod(DataplexServiceClient.environment_path) - parse_environment_path = staticmethod(DataplexServiceClient.parse_environment_path) - job_path = staticmethod(DataplexServiceClient.job_path) - parse_job_path = staticmethod(DataplexServiceClient.parse_job_path) - lake_path = staticmethod(DataplexServiceClient.lake_path) - parse_lake_path = staticmethod(DataplexServiceClient.parse_lake_path) - session_path = staticmethod(DataplexServiceClient.session_path) - parse_session_path = staticmethod(DataplexServiceClient.parse_session_path) - task_path = staticmethod(DataplexServiceClient.task_path) - parse_task_path = staticmethod(DataplexServiceClient.parse_task_path) - zone_path = staticmethod(DataplexServiceClient.zone_path) - parse_zone_path = staticmethod(DataplexServiceClient.parse_zone_path) - common_billing_account_path = staticmethod(DataplexServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataplexServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataplexServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataplexServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataplexServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataplexServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataplexServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataplexServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataplexServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataplexServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceAsyncClient: The constructed client. - """ - return DataplexServiceClient.from_service_account_info.__func__(DataplexServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceAsyncClient: The constructed client. 
- """ - return DataplexServiceClient.from_service_account_file.__func__(DataplexServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataplexServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataplexServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataplexServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DataplexServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dataplex service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataplexServiceTransport constructor. 
- If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DataplexServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataplexServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "credentialsType": None, - } - ) - - async def create_lake(self, - request: Optional[Union[service.CreateLakeRequest, dict]] = None, - *, - parent: Optional[str] = None, - lake: Optional[resources.Lake] = None, - lake_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]]): - The request object. Create lake request. - parent (:class:`str`): - Required. The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake (:class:`google.cloud.dataplex_v1.types.Lake`): - Required. Lake resource - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake_id (:class:`str`): - Required. Lake identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - - This corresponds to the ``lake_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
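# [Editor's note: illustrative sketch, not part of the generated diff.]
# The guard that follows makes the `request` object and the flattened fields
# mutually exclusive. Assuming an already-constructed client named `client`
# and placeholder resource names, the two valid call shapes would be:
#
#     # pass a fully-formed request object...
#     operation = client.create_lake(
#         request=dataplex_v1.CreateLakeRequest(
#             parent="projects/my-project/locations/us-central1",
#             lake_id="my-lake",
#         ),
#     )
#
#     # ...or the individual flattened fields, but never both:
#     operation = client.create_lake(
#         parent="projects/my-project/locations/us-central1",
#         lake_id="my-lake",
#     )
#
# Combining `request=` with `parent`, `lake`, or `lake_id` trips the
# ValueError raised immediately below.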
- flattened_params = [parent, lake, lake_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateLakeRequest): - request = service.CreateLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lake is not None: - request.lake = lake - if lake_id is not None: - request.lake_id = lake_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_lake(self, - request: Optional[Union[service.UpdateLakeRequest, dict]] = None, - *, - lake: Optional[resources.Lake] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]]): - The request object. Update lake request. - lake (:class:`google.cloud.dataplex_v1.types.Lake`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. 
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [lake, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateLakeRequest): - request = service.UpdateLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if lake is not None: - request.lake = lake - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("lake.name", request.lake.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def delete_lake(self, - request: Optional[Union[service.DeleteLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]]): - The request object. Delete lake request. - name (:class:`str`): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteLakeRequest): - request = service.DeleteLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_lakes(self, - request: Optional[Union[service.ListLakesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLakesAsyncPager: - r"""Lists lake resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_lakes(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lakes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]]): - The request object. List lakes request. - parent (:class:`str`): - Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager: - List lakes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
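# [Editor's note: illustrative sketch, not part of the generated diff.]
# list_lakes applies the same request-vs-flattened-fields guard below, and
# later copies `parent` into the request metadata as a routing header. Under
# the assumption of a parent of "projects/my-project/locations/us-central1",
# the entry built by gapic_v1.routing_header.to_grpc_metadata would look
# roughly like this (exact percent-encoding is handled by the helper):
#
#     ("x-goog-request-params", "parent=projects/my-project/locations/us-central1")
#
# which lets the frontend route the call without parsing the request body.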
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakesRequest): - request = service.ListLakesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_lakes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLakesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_lake(self, - request: Optional[Union[service.GetLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Lake: - r"""Retrieves a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetLakeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_lake(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]]): - The request object. Get lake request. - name (:class:`str`): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Lake: - A lake is a centralized repository - for managing enterprise data across the - organization distributed across many - cloud projects, and stored in a variety - of storage services such as Google Cloud - Storage and BigQuery. The resources - attached to a lake are referred to as - managed resources. Data within these - managed resources can be structured or - unstructured. A lake provides data - admins with tools to organize, secure - and manage their data at scale, and - provides data scientists and data - engineers an integrated experience to - easily search, discover, analyze and - transform data and associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetLakeRequest): - request = service.GetLakeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_lake_actions(self, - request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLakeActionsAsyncPager: - r"""Lists action resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_lake_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakeActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lake_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]]): - The request object. List lake actions request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakeActionsRequest): - request = service.ListLakeActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_lake_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLakeActionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
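-        # Note: the pager is lazy; additional ListLakeActions pages are
-        # fetched only as the caller iterates past each page boundary.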
- return response - - async def create_zone(self, - request: Optional[Union[service.CreateZoneRequest, dict]] = None, - *, - parent: Optional[str] = None, - zone: Optional[resources.Zone] = None, - zone_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a zone resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", - zone=zone, - ) - - # Make the request - operation = client.create_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]]): - The request object. Create zone request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone (:class:`google.cloud.dataplex_v1.types.Zone`): - Required. Zone resource. - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone_id (:class:`str`): - Required. Zone identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique across all lakes from all locations in - a project. - - Must not be one of the reserved IDs (i.e. "default", - "global-temp") - - This corresponds to the ``zone_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. 
A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, zone, zone_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateZoneRequest): - request = service.CreateZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if zone is not None: - request.zone = zone - if zone_id is not None: - request.zone_id = zone_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_zone(self, - request: Optional[Union[service.UpdateZoneRequest, dict]] = None, - *, - zone: Optional[resources.Zone] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]]): - The request object. Update zone request. - zone (:class:`google.cloud.dataplex_v1.types.Zone`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [zone, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateZoneRequest): - request = service.UpdateZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if zone is not None: - request.zone = zone - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_zone] - - # Certain fields should be provided within the metadata header; - # add these here. 
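-        # ``to_grpc_metadata`` renders these params as the standard
-        # ``x-goog-request-params`` header with a urlencoded value, roughly
-        # (hypothetical zone name):
-        #
-        #     ("x-goog-request-params",
-        #      "zone.name=projects%2Fmy-project%2F...%2Fzones%2Fmy-zone")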
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("zone.name", request.zone.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_zone(self, - request: Optional[Union[service.DeleteZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]]): - The request object. Delete zone request. - name (:class:`str`): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
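-        # A minimal usage sketch (hedged; hypothetical zone name). The returned
-        # AsyncOperation's ``result()`` is itself a coroutine:
-        #
-        #     operation = await client.delete_zone(
-        #         name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone",
-        #     )
-        #     await operation.result()  # resolves to Empty when deletion finishes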
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteZoneRequest): - request = service.DeleteZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_zones(self, - request: Optional[Union[service.ListZonesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListZonesAsyncPager: - r"""Lists zone resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_zones(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZonesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zones(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]]): - The request object. List zones request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager: - List zones response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZonesRequest): - request = service.ListZonesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_zones] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListZonesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_zone(self, - request: Optional[Union[service.GetZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Zone: - r"""Retrieves a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetZoneRequest( - name="name_value", - ) - - # Make the request - response = await client.get_zone(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]]): - The request object. Get zone request. - name (:class:`str`): - Required. 
The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Zone: - A zone represents a logical group of - related assets within a lake. A zone can - be used to map to organizational - structure or represent stages of data - readiness from raw to curated. It - provides managing behavior that is - shared or inherited by all contained - assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetZoneRequest): - request = service.GetZoneRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_zone_actions(self, - request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListZoneActionsAsyncPager: - r"""Lists action resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_zone_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZoneActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zone_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]]): - The request object. List zone actions request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZoneActionsRequest): - request = service.ListZoneActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_zone_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListZoneActionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_asset(self, - request: Optional[Union[service.CreateAssetRequest, dict]] = None, - *, - parent: Optional[str] = None, - asset: Optional[resources.Asset] = None, - asset_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]]): - The request object. Create asset request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset (:class:`google.cloud.dataplex_v1.types.Asset`): - Required. Asset resource. - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_id (:class:`str`): - Required. Asset identifier. This ID will be used to - generate names such as table names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the zone. - - This corresponds to the ``asset_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, asset, asset_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateAssetRequest): - request = service.CreateAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if asset is not None: - request.asset = asset - if asset_id is not None: - request.asset_id = asset_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_asset(self, - request: Optional[Union[service.UpdateAssetRequest, dict]] = None, - *, - asset: Optional[resources.Asset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.UpdateAssetRequest( - asset=asset, - ) - - # Make the request - operation = client.update_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]]): - The request object. Update asset request. - asset (:class:`google.cloud.dataplex_v1.types.Asset`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. 
- - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [asset, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateAssetRequest): - request = service.UpdateAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if asset is not None: - request.asset = asset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("asset.name", request.asset.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_asset(self, - request: Optional[Union[service.DeleteAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]]): - The request object. Delete asset request. - name (:class:`str`): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteAssetRequest): - request = service.DeleteAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
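-        # Callers can override the default policy via the public method's
-        # ``retry``/``timeout`` arguments, e.g. a sketch with a hypothetical
-        # asset name:
-        #
-        #     from google.api_core import retry_async
-        #     custom_retry = retry_async.AsyncRetry(
-        #         initial=0.1, maximum=60.0, multiplier=1.3,
-        #     )
-        #     operation = await client.delete_asset(
-        #         name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/assets/my-asset",
-        #         retry=custom_retry,
-        #     )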
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_assets(self, - request: Optional[Union[service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: - r"""Lists asset resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_assets(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]]): - The request object. List assets request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager: - List assets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetsRequest): - request = service.ListAssetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
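-        # (``request`` may also be supplied as a plain dict, coerced above, e.g.
-        # {"parent": "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone"}.)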
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAssetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_asset(self, - request: Optional[Union[service.GetAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Asset: - r"""Retrieves an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_asset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]]): - The request object. Get asset request. - name (:class:`str`): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Asset: - An asset represents a cloud resource - that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
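-        # A minimal usage sketch (hedged; hypothetical asset name):
-        #
-        #     asset = await client.get_asset(
-        #         name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/assets/my-asset",
-        #     )
-        #     print(asset.resource_spec.type_)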
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAssetRequest): - request = service.GetAssetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_asset_actions(self, - request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetActionsAsyncPager: - r"""Lists action resources in an asset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_asset_actions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_asset_actions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]]): - The request object. List asset actions request. - parent (:class:`str`): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetActionsRequest): - request = service.ListAssetActionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_asset_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAssetActionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_task(self, - request: Optional[Union[service.CreateTaskRequest, dict]] = None, - *, - parent: Optional[str] = None, - task: Optional[tasks.Task] = None, - task_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a task resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]]): - The request object. Create task request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (:class:`google.cloud.dataplex_v1.types.Task`): - Required. Task resource. - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (:class:`str`): - Required. Task identifier. - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, task, task_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateTaskRequest): - request = service.CreateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - if task_id is not None: - request.task_id = task_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
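The wrapped method referenced above carries the generated default retry and timeout, and both can be overridden per call. A sketch with illustrative values, not recommended settings:

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core import retry_async as retries
    from google.cloud import dataplex_v1

    async def list_with_custom_retry():
        client = dataplex_v1.DataplexServiceAsyncClient()
        pager = await client.list_tasks(
            parent="projects/p/locations/us-central1/lakes/l",
            retry=retries.AsyncRetry(
                predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
                initial=1.0,
                maximum=10.0,
                multiplier=2.0,
                timeout=60.0,
            ),
            timeout=30.0,  # overall per-call timeout, in seconds
        )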
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_task(self, - request: Optional[Union[service.UpdateTaskRequest, dict]] = None, - *, - task: Optional[tasks.Task] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]]): - The request object. Update task request. - task (:class:`google.cloud.dataplex_v1.types.Task`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
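On this async surface the LRO methods resolve to an AsyncOperation whose result() is itself awaitable. A minimal sketch with a placeholder task name:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def delete_and_wait():
        client = dataplex_v1.DataplexServiceAsyncClient()
        operation = await client.delete_task(
            name="projects/p/locations/us-central1/lakes/l/tasks/t",
        )
        # result() re-raises any operation error; timeout is in seconds.
        await operation.result(timeout=300)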
- - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [task, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateTaskRequest): - request = service.UpdateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if task is not None: - request.task = task - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("task.name", request.task.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_task(self, - request: Optional[Union[service.DeleteTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteTaskRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]]): - The request object. Delete task request. - name (:class:`str`): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteTaskRequest): - request = service.DeleteTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: Optional[Union[service.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""Lists tasks under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_tasks(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]]): - The request object. List tasks request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager: - List tasks response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListTasksRequest): - request = service.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
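The AsyncPager returned above can be consumed item by item or page by page. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def consume_tasks():
        client = dataplex_v1.DataplexServiceAsyncClient()
        parent = "projects/p/locations/us-central1/lakes/l"

        # Item by item; additional pages are fetched transparently.
        pager = await client.list_tasks(parent=parent)
        async for task in pager:
            print(task.name)

        # Page by page (a fresh call), to reach page-level fields.
        pager = await client.list_tasks(parent=parent)
        async for page in pager.pages:
            print(len(page.tasks), page.next_page_token)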
- return response - - async def get_task(self, - request: Optional[Union[service.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> tasks.Task: - r"""Get task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]]): - The request object. Get task request. - name (:class:`str`): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Task: - A task represents a user-visible job. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetTaskRequest): - request = service.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
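The `-bin` metadata rule these docstrings repeat is the standard gRPC convention: plain keys take str values, keys ending in `-bin` take bytes. A sketch with made-up header names:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def call_with_metadata():
        client = dataplex_v1.DataplexServiceAsyncClient()
        task = await client.get_task(
            name="projects/p/locations/us-central1/lakes/l/tasks/t",
            metadata=(
                ("x-example-header", "a-string-value"),  # plain key: str value
                ("x-example-trace-bin", b"\x00\x01"),    # "-bin" key: bytes value
            ),
        )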
- return response - - async def list_jobs(self, - request: Optional[Union[service.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""Lists Jobs under the given task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_jobs(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]]): - The request object. List jobs request. - parent (:class:`str`): - Required. The resource name of the parent task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager: - List jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListJobsRequest): - request = service.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here.
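For reference, the routing-header helper used throughout these bodies folds the named request fields into a single x-goog-request-params metadata entry; the URL-encoded value shown in the comment is an assumption about the helper's urlencode step:

.. code-block:: python

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/p/locations/us-central1/lakes/l/tasks/t"),)
    )
    print(header)
    # ('x-goog-request-params', 'parent=projects%2Fp%2Flocations%2F...')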
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_task(self, - request: Optional[Union[service.RunTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> service.RunTaskResponse: - r"""Run an on demand execution of a Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_run_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.RunTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.run_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]]): - The request object. - name (:class:`str`): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.RunTaskResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.RunTaskRequest): - request = service.RunTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
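Assuming RunTaskResponse exposes the created job through a `job` field (not shown in this hunk), an on-demand run can be chained into job polling; a sketch with a placeholder task name:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def run_and_poll():
        client = dataplex_v1.DataplexServiceAsyncClient()
        run = await client.run_task(
            name="projects/p/locations/us-central1/lakes/l/tasks/t",
        )
        # Assumes the response's `job` field carries the new job's name.
        job = await client.get_job(name=run.job.name)
        print(job.state)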
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[service.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> tasks.Job: - r"""Get job resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]]): - The request object. Get job request. - name (:class:`str`): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Job: - A job represents an instance of a - task. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, service.GetJobRequest): - request = service.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_job(self, - request: Optional[Union[service.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Cancel jobs running for the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_cancel_job(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_job(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]]): - The request object. Cancel task jobs. - name (:class:`str`): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one.
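Since cancel_job returns no body, the usual pattern is to re-read the job to observe the cancellation; a sketch, where the CANCELLING/CANCELLED states are assumptions about Job.State:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def cancel_and_confirm():
        client = dataplex_v1.DataplexServiceAsyncClient()
        name = "projects/p/locations/us-central1/lakes/l/tasks/t/jobs/j"
        await client.cancel_job(name=name)  # returns None
        job = await client.get_job(name=name)
        print(job.state)  # assumed to move through CANCELLING to CANCELLED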
- if not isinstance(request, service.CancelJobRequest): - request = service.CancelJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_environment(self, - request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, - *, - parent: Optional[str] = None, - environment: Optional[analyze.Environment] = None, - environment_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create an environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]]): - The request object. Create environment request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment (:class:`google.cloud.dataplex_v1.types.Environment`): - Required. Environment resource. - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment_id (:class:`str`): - Required. Environment identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - - This corresponds to the ``environment_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, environment, environment_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateEnvironmentRequest): - request = service.CreateEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if environment is not None: - request.environment = environment - if environment_id is not None: - request.environment_id = environment_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_environment(self, - request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, - *, - environment: Optional[analyze.Environment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update the environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]]): - The request object. Update environment request. - environment (:class:`google.cloud.dataplex_v1.types.Environment`): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [environment, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateEnvironmentRequest): - request = service.UpdateEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if environment is not None: - request.environment = environment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_environment] - - # Certain fields should be provided within the metadata header; - # add these here. 
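The update_* methods above write only the paths named in update_mask; a sketch building one with protobuf's FieldMask (the `description` path and resource name are illustrative):

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud import dataplex_v1

    async def update_description():
        client = dataplex_v1.DataplexServiceAsyncClient()
        environment = dataplex_v1.Environment(
            name="projects/p/locations/us-central1/lakes/l/environments/e",
            description="refreshed description",
        )
        operation = await client.update_environment(
            environment=environment,
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )
        await operation.result()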
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("environment.name", request.environment.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_environment(self, - request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]]): - The request object. Delete environment request. - name (:class:`str`): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteEnvironmentRequest): - request = service.DeleteEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_environments(self, - request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEnvironmentsAsyncPager: - r"""Lists environments under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_environments(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEnvironmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_environments(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]]): - The request object. List environments request. - parent (:class:`str`): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: - List environments response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListEnvironmentsRequest): - request = service.ListEnvironmentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_environments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEnvironmentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_environment(self, - request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Environment: - r"""Get environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_environment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]]): - The request object. Get environment request. - name (:class:`str`): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Environment: - Environment represents a user-visible - compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetEnvironmentRequest): - request = service.GetEnvironmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_sessions(self, - request: Optional[Union[service.ListSessionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsAsyncPager: - r"""Lists session resources in an environment. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_sessions(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListSessionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]]): - The request object. List sessions request. - parent (:class:`str`): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager: - List sessions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListSessionsRequest): - request = service.ListSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
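Because the async variants are coroutines, the call itself must be awaited to obtain the ``ListSessionsAsyncPager``; iteration then drives page fetches lazily through ``__aiter__``. A minimal consumption sketch, with a placeholder parent resource name:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def print_sessions():
        client = dataplex_v1.DataplexServiceAsyncClient()
        pager = await client.list_sessions(
            parent="projects/my-project/locations/us-central1/lakes/my-lake/environments/my-env",
        )
        # Each additional page is requested on demand as the loop advances.
        async for session in pager:
            print(session.name)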
- response = pagers.ListSessionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
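The ``to_grpc_metadata`` helper used throughout these methods serializes the listed request fields into the ``x-goog-request-params`` metadata entry that the backend uses for request routing. A small illustration (the resource name is a placeholder):

.. code-block:: python

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/locations/us-central1"),)
    )
    # `header` is an ("x-goog-request-params", "<url-encoded fields>") pair,
    # ready to be appended to the per-call metadata tuple as shown above.
    print(header)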
- rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataplexServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DataplexServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py deleted file mode 100644 index ce202c7c2fef..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ /dev/null @@ -1,5118 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
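Since the async client above defines ``__aenter__``/``__aexit__`` (the latter awaiting ``transport.close()``), it can be used as an async context manager so the underlying channel is reliably closed. A minimal sketch with a placeholder lake name:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def main():
        async with dataplex_v1.DataplexServiceAsyncClient() as client:
            lake = await client.get_lake(
                name="projects/my-project/locations/us-central1/lakes/my-lake",
            )
            print(lake.display_name)
        # The transport is closed once the block exits.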
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.dataplex_service import pagers -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataplexServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataplexServiceGrpcTransport -from .transports.grpc_asyncio import DataplexServiceGrpcAsyncIOTransport -from .transports.rest import DataplexServiceRestTransport - - -class DataplexServiceClientMeta(type): - """Metaclass for the DataplexService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] - _transport_registry["grpc"] = DataplexServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataplexServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DataplexServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataplexServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
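The registry pattern above is what makes the ``transport`` constructor argument work: a string label is resolved through ``get_transport_class``, and omitting it falls back to the first registered entry. A sketch of both paths (credentials resolution is left to the environment):

.. code-block:: python

    from google.cloud import dataplex_v1

    # No label: the first registered transport ("grpc") is used.
    grpc_client = dataplex_v1.DataplexServiceClient()

    # Explicit label: "rest" resolves to DataplexServiceRestTransport
    # through the metaclass registry shown above.
    rest_client = dataplex_v1.DataplexServiceClient(transport="rest")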
- return next(iter(cls._transport_registry.values())) - - -class DataplexServiceClient(metaclass=DataplexServiceClientMeta): - """Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataplexServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataplexServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataplexServiceTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def action_path(project: str,location: str,lake: str,action: str,) -> str: - """Returns a fully-qualified action string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, ) - - @staticmethod - def parse_action_path(path: str) -> Dict[str,str]: - """Parses a action path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/actions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def asset_path(project: str,location: str,lake: str,zone: str,asset: str,) -> str: - """Returns a fully-qualified asset string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) - - @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: - """Parses a asset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/assets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def environment_path(project: str,location: str,lake: str,environment: str,) -> str: - """Returns a fully-qualified environment string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) - - @staticmethod - def parse_environment_path(path: str) -> Dict[str,str]: - """Parses a environment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_path(project: str,location: str,lake: str,task: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/tasks/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def lake_path(project: str,location: str,lake: str,) -> str: - """Returns a fully-qualified lake string.""" - return "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - - @staticmethod - def parse_lake_path(path: str) -> Dict[str,str]: - """Parses a lake path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def session_path(project: str,location: str,lake: str,environment: str,session: str,) -> str: - """Returns a fully-qualified session string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) - - @staticmethod - def parse_session_path(path: str) -> Dict[str,str]: - """Parses a session path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)/sessions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,lake: 
str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/tasks/(?P<task>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def zone_path(project: str,location: str,lake: str,zone: str,) -> str: - """Returns a fully-qualified zone string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - - @staticmethod - def parse_zone_path(path: str) -> Dict[str,str]: - """Parses a zone path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None.
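Each ``*_path`` builder and its ``parse_*_path`` counterpart are inverses: the former fills the resource-name template, the latter recovers the segments through the named groups in the regexes above. A quick round trip with placeholder identifiers:

.. code-block:: python

    from google.cloud import dataplex_v1

    path = dataplex_v1.DataplexServiceClient.lake_path(
        "my-project", "us-central1", "my-lake"
    )
    # -> "projects/my-project/locations/us-central1/lakes/my-lake"

    segments = dataplex_v1.DataplexServiceClient.parse_lake_path(path)
    # -> {"project": "my-project", "location": "us-central1", "lake": "my-lake"}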
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = DataplexServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataplexServiceTransport, Callable[..., DataplexServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dataplex service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataplexServiceTransport,Callable[..., DataplexServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataplexServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataplexServiceClient._read_environment_variables() - self._client_cert_source = DataplexServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataplexServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataplexServiceTransport) - if transport_provided: - # transport is a DataplexServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(DataplexServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataplexServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataplexServiceTransport], Callable[..., DataplexServiceTransport]] = ( - DataplexServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataplexServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.DataplexServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "credentialsType": None, - } - ) - - def create_lake(self, - request: Optional[Union[service.CreateLakeRequest, dict]] = None, - *, - parent: Optional[str] = None, - lake: Optional[resources.Lake] = None, - lake_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateLakeRequest, dict]): - The request object. Create lake request. - parent (str): - Required. 
The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Lake resource - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lake_id (str): - Required. Lake identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - - This corresponds to the ``lake_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, lake, lake_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateLakeRequest): - request = service.CreateLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lake is not None: - request.lake = lake - if lake_id is not None: - request.lake_id = lake_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_lake] - - # Certain fields should be provided within the metadata header; - # add these here. 
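Putting the ``lake_id`` naming rules above into practice, a sketch of the flattened synchronous call; the project, region, and identifiers are placeholders:

.. code-block:: python

    from google.cloud import dataplex_v1

    def create_lake_example():
        client = dataplex_v1.DataplexServiceClient()
        operation = client.create_lake(
            parent="projects/my-project/locations/us-central1",
            lake=dataplex_v1.Lake(display_name="Sales lake"),
            lake_id="sales-lake",  # lowercase letters, digits, hyphens; 1-63 chars
        )
        # Block until the long-running operation resolves to a Lake.
        return operation.result()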
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_lake(self, - request: Optional[Union[service.UpdateLakeRequest, dict]] = None, - *, - lake: Optional[resources.Lake] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateLakeRequest( - ) - - # Make the request - operation = client.update_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateLakeRequest, dict]): - The request object. Update lake request. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``lake`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Lake` A lake is a centralized repository for managing enterprise data across the - organization distributed across many cloud projects, - and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources - attached to a lake are referred to as managed - resources. Data within these managed resources can be - structured or unstructured. 
A lake provides data - admins with tools to organize, secure and manage - their data at scale, and provides data scientists and - data engineers an integrated experience to easily - search, discover, analyze and transform data and - associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [lake, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateLakeRequest): - request = service.UpdateLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if lake is not None: - request.lake = lake - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("lake.name", request.lake.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Lake, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_lake(self, - request: Optional[Union[service.DeleteLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteLakeRequest, dict]): - The request object. Delete lake request. - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteLakeRequest): - request = service.DeleteLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_lakes(self, - request: Optional[Union[service.ListLakesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLakesPager: - r"""Lists lake resources in a project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_lakes(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lakes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListLakesRequest, dict]): - The request object. List lakes request. - parent (str): - Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager: - List lakes response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListLakesRequest): - request = service.ListLakesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_lakes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLakesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
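- # A hedged usage sketch for the pager constructed above; "my-project" and
- # "us-central1" are placeholder values. Iterating the pager yields Lake
- # messages and fetches further pages lazily, while the `.pages` attribute
- # yields one ListLakesResponse per page instead:
- #
- #     pager = client.list_lakes(parent="projects/my-project/locations/us-central1")
- #     for lake in pager:
- #         print(lake.name)
- #
- #     # or, page by page (a pager can only be consumed once):
- #     for page in client.list_lakes(parent="projects/my-project/locations/us-central1").pages:
- #         print(len(page.lakes))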
- return response - - def get_lake(self, - request: Optional[Union[service.GetLakeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Lake: - r"""Retrieves a lake resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetLakeRequest( - name="name_value", - ) - - # Make the request - response = client.get_lake(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetLakeRequest, dict]): - The request object. Get lake request. - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Lake: - A lake is a centralized repository - for managing enterprise data across the - organization distributed across many - cloud projects, and stored in a variety - of storage services such as Google Cloud - Storage and BigQuery. The resources - attached to a lake are referred to as - managed resources. Data within these - managed resources can be structured or - unstructured. A lake provides data - admins with tools to organize, secure - and manage their data at scale, and - provides data scientists and data - engineers an integrated experience to - easily search, discover, analyze and - transform data and associated metadata. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetLakeRequest): - request = service.GetLakeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
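- # As the guard above enforces, the `request` object and the flattened
- # `name` keyword are mutually exclusive. A minimal sketch of the two
- # equivalent call styles, using a placeholder resource name:
- #
- #     lake_name = "projects/my-project/locations/us-central1/lakes/my-lake"
- #     client.get_lake(name=lake_name)
- #     client.get_lake(request=dataplex_v1.GetLakeRequest(name=lake_name))
- #
- # Passing both in the same call raises the ValueError above.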
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_lake] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_lake_actions(self, - request: Optional[Union[service.ListLakeActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLakeActionsPager: - r"""Lists action resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_lake_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListLakeActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_lake_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListLakeActionsRequest, dict]): - The request object. List lake actions request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
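- # Because proto-plus request messages accept a mapping in their
- # constructor, the isinstance check below is what lets callers pass a
- # plain dict rather than a request object. A minimal sketch with a
- # placeholder parent:
- #
- #     client.list_lake_actions(
- #         request={"parent": "projects/my-project/locations/us-central1/lakes/my-lake"}
- #     )
- #
- # which is coerced into an equivalent ListLakeActionsRequest here.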
- if not isinstance(request, service.ListLakeActionsRequest): - request = service.ListLakeActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_lake_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLakeActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_zone(self, - request: Optional[Union[service.CreateZoneRequest, dict]] = None, - *, - parent: Optional[str] = None, - zone: Optional[resources.Zone] = None, - zone_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a zone resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", - zone=zone, - ) - - # Make the request - operation = client.create_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateZoneRequest, dict]): - The request object. Create zone request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Zone resource. - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - zone_id (str): - Required. Zone identifier. This ID will be used to - generate names such as database and dataset names when - publishing metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. 
- - Must be unique across all lakes from all locations in - a project. - - Must not be one of the reserved IDs (i.e. "default", - "global-temp") - - This corresponds to the ``zone_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, zone, zone_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateZoneRequest): - request = service.CreateZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if zone is not None: - request.zone = zone - if zone_id is not None: - request.zone_id = zone_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_zone(self, - request: Optional[Union[service.UpdateZoneRequest, dict]] = None, - *, - zone: Optional[resources.Zone] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.UpdateZoneRequest( - zone=zone, - ) - - # Make the request - operation = client.update_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateZoneRequest, dict]): - The request object. Update zone request. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``zone`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Zone` A zone represents a logical group of related assets within a lake. A zone can - be used to map to organizational structure or - represent stages of data readiness from raw to - curated. It provides managing behavior that is shared - or inherited by all contained assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [zone, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateZoneRequest): - request = service.UpdateZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if zone is not None: - request.zone = zone - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_zone] - - # Certain fields should be provided within the metadata header; - # add these here. 
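- # Note on the return value: further below the raw response is wrapped via
- # operation.from_gapic into a long-running-operation future. A hedged
- # usage sketch for such futures (the timeout value is a placeholder):
- #
- #     op = client.update_zone(zone=zone, update_mask=mask)
- #     updated = op.result(timeout=300)   # blocks until the LRO finishes
- #     progress = op.metadata             # service.OperationMetadata while running
- #     op.cancel()                        # best-effort cancellation, if still running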
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("zone.name", request.zone.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Zone, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_zone(self, - request: Optional[Union[service.DeleteZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteZoneRequest, dict]): - The request object. Delete zone request. - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
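- # An aside on the `metadata` argument documented in the docstring above:
- # gRPC requires values for keys ending in "-bin" to be bytes, while all
- # other values must be str. A hedged sketch (both key names are
- # illustrative, not keys this service defines):
- #
- #     client.delete_zone(
- #         name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone",
- #         metadata=(("x-custom-header", "value"), ("x-custom-trace-bin", b"\x00\x01")),
- #     )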
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteZoneRequest): - request = service.DeleteZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_zones(self, - request: Optional[Union[service.ListZonesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListZonesPager: - r"""Lists zone resources in a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_zones(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZonesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zones(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListZonesRequest, dict]): - The request object. List zones request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager: - List zones response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZonesRequest): - request = service.ListZonesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_zones] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListZonesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_zone(self, - request: Optional[Union[service.GetZoneRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Zone: - r"""Retrieves a zone resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetZoneRequest( - name="name_value", - ) - - # Make the request - response = client.get_zone(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetZoneRequest, dict]): - The request object. Get zone request. - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Zone: - A zone represents a logical group of - related assets within a lake. A zone can - be used to map to organizational - structure or represent stages of data - readiness from raw to curated. It - provides managing behavior that is - shared or inherited by all contained - assets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetZoneRequest): - request = service.GetZoneRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_zone] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_zone_actions(self, - request: Optional[Union[service.ListZoneActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListZoneActionsPager: - r"""Lists action resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_zone_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListZoneActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_zone_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListZoneActionsRequest, dict]): - The request object. List zone actions request. - parent (str): - Required. 
The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListZoneActionsRequest): - request = service.ListZoneActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_zone_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListZoneActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_asset(self, - request: Optional[Union[service.CreateAssetRequest, dict]] = None, - *, - parent: Optional[str] = None, - asset: Optional[resources.Asset] = None, - asset_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.CreateAssetRequest( - parent="parent_value", - asset_id="asset_id_value", - asset=asset, - ) - - # Make the request - operation = client.create_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateAssetRequest, dict]): - The request object. Create asset request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Asset resource. - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_id (str): - Required. Asset identifier. This ID will be used to - generate names such as table names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the zone. - - This corresponds to the ``asset_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, asset, asset_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateAssetRequest): - request = service.CreateAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
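- # The retry and timeout defaults resolved further below come from the
- # transport's wrapped method configuration; both can be overridden per
- # call. A minimal sketch, with placeholder values rather than
- # recommendations:
- #
- #     from google.api_core import exceptions
- #     from google.api_core import retry as retries
- #
- #     client.create_asset(
- #         parent=parent, asset=asset, asset_id="my-asset",
- #         retry=retries.Retry(
- #             initial=1.0, maximum=10.0, multiplier=2.0, timeout=60.0,
- #             predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
- #         ),
- #         timeout=120.0,
- #     )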
- if parent is not None: - request.parent = parent - if asset is not None: - request.asset = asset - if asset_id is not None: - request.asset_id = asset_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_asset(self, - request: Optional[Union[service.UpdateAssetRequest, dict]] = None, - *, - asset: Optional[resources.Asset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - asset = dataplex_v1.Asset() - asset.resource_spec.type_ = "BIGQUERY_DATASET" - - request = dataplex_v1.UpdateAssetRequest( - asset=asset, - ) - - # Make the request - operation = client.update_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateAssetRequest, dict]): - The request object. Update asset request. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``asset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Asset` An asset represents a cloud resource that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [asset, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateAssetRequest): - request = service.UpdateAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if asset is not None: - request.asset = asset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("asset.name", request.asset.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - resources.Asset, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_asset(self, - request: Optional[Union[service.DeleteAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteAssetRequest, dict]): - The request object. Delete asset request. - name (str): - Required. 
The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteAssetRequest): - request = service.DeleteAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_assets(self, - request: Optional[Union[service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: - r"""Lists asset resources in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_assets(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAssetsRequest, dict]): - The request object. List assets request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager: - List assets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetsRequest): - request = service.ListAssetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAssetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
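# A minimal sketch of page-level iteration over the ListAssetsPager wrapped
# above, assuming default credentials; the zone path is a placeholder.
# `.pages` yields one ListAssetsResponse per underlying RPC, which is handy
# when batch-sized chunks matter more than individual assets.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
pager = client.list_assets(
    parent="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone",
)
for page in pager.pages:
    # page.assets holds only the assets returned by this one RPC.
    for asset in page.assets:
        print(asset.name)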
- return response - - def get_asset(self, - request: Optional[Union[service.GetAssetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resources.Asset: - r"""Retrieves an asset resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = client.get_asset(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetAssetRequest, dict]): - The request object. Get asset request. - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Asset: - An asset represents a cloud resource - that is being managed within a lake as a - member of a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAssetRequest): - request = service.GetAssetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_asset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_asset_actions(self, - request: Optional[Union[service.ListAssetActionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetActionsPager: - r"""Lists action resources in an asset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_asset_actions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListAssetActionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_asset_actions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListAssetActionsRequest, dict]): - The request object. List asset actions request. - parent (str): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager: - List actions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListAssetActionsRequest): - request = service.ListAssetActionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
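# A sketch combining the two listing calls around this point: enumerate a
# zone's assets, then the actions raised on each one. Assumes default
# credentials; the zone path is a placeholder. Both pagers page
# transparently, so the nested loops stay simple.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
zone = "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone"
for asset in client.list_assets(parent=zone):
    for action in client.list_asset_actions(parent=asset.name):
        print(asset.name, "->", action.category.name)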
- rpc = self._transport._wrapped_methods[self._transport.list_asset_actions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAssetActionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_task(self, - request: Optional[Union[service.CreateTaskRequest, dict]] = None, - *, - parent: Optional[str] = None, - task: Optional[tasks.Task] = None, - task_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a task resource within a lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateTaskRequest, dict]): - The request object. Create task request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (google.cloud.dataplex_v1.types.Task): - Required. Task resource. - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (str): - Required. Task identifier. - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, task, task_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateTaskRequest): - request = service.CreateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - if task_id is not None: - request.task_id = task_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_task(self, - request: Optional[Union[service.UpdateTaskRequest, dict]] = None, - *, - task: Optional[tasks.Task] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
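# A sketch of driving the create_task LRO shown above by hand, assuming
# default credentials; every identifier below (project, bucket, service
# account) is a placeholder. An ON_DEMAND trigger needs no schedule, which
# keeps the Task message small.
import concurrent.futures
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
task = dataplex_v1.Task()
task.spark.main_jar_file_uri = "gs://my-bucket/my-job.jar"
task.trigger_spec.type_ = "ON_DEMAND"
task.execution_spec.service_account = "sa@my-project.iam.gserviceaccount.com"

operation = client.create_task(
    parent="projects/my-project/locations/us-central1/lakes/my-lake",
    task=task,
    task_id="my-task",
)
try:
    created = operation.result(timeout=300)  # block up to five minutes
    print(created.name)
except concurrent.futures.TimeoutError:
    # Still running server-side; OperationMetadata records what is in flight.
    print("still creating:", operation.metadata.target)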
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.UpdateTaskRequest( - task=task, - ) - - # Make the request - operation = client.update_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateTaskRequest, dict]): - The request object. Update task request. - task (google.cloud.dataplex_v1.types.Task): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataplex_v1.types.Task` A task - represents a user-visible job. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [task, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateTaskRequest): - request = service.UpdateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if task is not None: - request.task = task - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("task.name", request.task.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
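# A sketch of a masked update with the flattened arguments documented above,
# assuming the task already exists (the path is a placeholder). Only the
# paths named in update_mask are changed, so sending a sparsely populated
# Task message is safe.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.DataplexServiceClient()
task = dataplex_v1.Task()
task.name = "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task"
task.description = "nightly ingest"

operation = client.update_task(
    task=task,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
print(operation.result().description)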
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - tasks.Task, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_task(self, - request: Optional[Union[service.DeleteTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteTaskRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteTaskRequest, dict]): - The request object. Delete task request. - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
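# A sketch of a blocking delete, assuming the placeholder task path exists.
# The LRO's result type is google.protobuf.Empty, so result() only confirms
# completion.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
operation = client.delete_task(
    name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",
)
operation.result()  # returns an Empty message once the delete has finished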
- if not isinstance(request, service.DeleteTaskRequest): - request = service.DeleteTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: Optional[Union[service.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksPager: - r"""Lists tasks under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_tasks(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListTasksRequest, dict]): - The request object. List tasks request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager: - List tasks response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListTasksRequest): - request = service.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_task(self, - request: Optional[Union[service.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> tasks.Task: - r"""Get task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetTaskRequest, dict]): - The request object. Get task request. - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
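# A sketch of overriding the default retry policy on get_task via the `retry`
# argument documented above, assuming an interactive caller that prefers to
# fail fast; all numbers are illustrative only.
from google.api_core import exceptions, retry
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
task = client.get_task(
    name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",
    retry=retry.Retry(
        predicate=retry.if_exception_type(exceptions.ServiceUnavailable),
        initial=0.5,   # seconds before the first retry
        maximum=8.0,   # cap on the exponential backoff
        timeout=30.0,  # give up entirely after 30 seconds
    ),
    timeout=10.0,
)
print(task.name)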
- - Returns: - google.cloud.dataplex_v1.types.Task: - A task represents a user-visible job. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetTaskRequest): - request = service.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: Optional[Union[service.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsPager: - r"""Lists Jobs under the given task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_jobs(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListJobsRequest, dict]): - The request object. List jobs request. - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
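# A sketch of client-side filtering on Job.state while iterating the pager
# documented below; the task path is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
task_name = "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task"
running = [
    job for job in client.list_jobs(parent=task_name)
    if job.state == dataplex_v1.Job.State.RUNNING
]
print(f"{len(running)} job(s) still running")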
- - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager: - List jobs response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListJobsRequest): - request = service.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_task(self, - request: Optional[Union[service.RunTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> service.RunTaskResponse: - r"""Run an on demand execution of a Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_run_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunTaskRequest( - name="name_value", - ) - - # Make the request - response = client.run_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]): - The request object. - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.RunTaskResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.RunTaskRequest): - request = service.RunTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[service.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> tasks.Job: - r"""Get job resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetJobRequest, dict]): - The request object. Get job request. - name (str): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Job: - A job represents an instance of a - task. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetJobRequest): - request = service.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_job(self, - request: Optional[Union[service.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Cancel jobs running for the task resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_cancel_job(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_job(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.CancelJobRequest, dict]): - The request object. Cancel task jobs. - name (str): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}/job/{job_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
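# A sketch of an on-demand run followed by a crude poll, assuming the
# placeholder task path; RunTaskResponse.job carries the Job that was
# spawned, and get_job re-fetches its state. A real caller would bound the
# loop and back off between polls.
import time
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
response = client.run_task(
    name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",
)
job = response.job
while job.state == dataplex_v1.Job.State.RUNNING:
    time.sleep(10)  # fixed interval for brevity only
    job = client.get_job(name=job.name)
print(job.state.name)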
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CancelJobRequest): - request = service.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_environment(self, - request: Optional[Union[service.CreateEnvironmentRequest, dict]] = None, - *, - parent: Optional[str] = None, - environment: Optional[analyze.Environment] = None, - environment_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create an environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): - The request object. Create environment request. - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. 
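# A sketch of cancelling only while a job is still running, assuming the
# placeholder job path; cancel_job returns None, so there is nothing to
# inspect on success.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
job_name = (
    "projects/my-project/locations/us-central1/lakes/my-lake"
    "/tasks/my-task/jobs/my-job"
)
job = client.get_job(name=job_name)
if job.state == dataplex_v1.Job.State.RUNNING:
    client.cancel_job(name=job.name)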
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Environment resource. - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - environment_id (str): - Required. Environment identifier. - - - Must contain only lowercase letters, numbers and - hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - - This corresponds to the ``environment_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, environment, environment_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateEnvironmentRequest): - request = service.CreateEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if environment is not None: - request.environment = environment - if environment_id is not None: - request.environment_id = environment_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. 
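# The environment_id rules listed in the docstring above reduce to a single
# pattern; this helper is illustrative only and not part of the library.
# (Uniqueness within the lake can only be checked server-side.)
import re

_ENV_ID = re.compile(r"[a-z](?:[a-z0-9-]{0,61}[a-z0-9])?")

def looks_like_valid_environment_id(env_id: str) -> bool:
    # lowercase letters, digits and hyphens; starts with a letter; ends with
    # a letter or digit; 1-63 characters overall.
    return _ENV_ID.fullmatch(env_id) is not None

assert looks_like_valid_environment_id("dev-env-1")
assert not looks_like_valid_environment_id("1-bad-start")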
- return response - - def update_environment(self, - request: Optional[Union[service.UpdateEnvironmentRequest, dict]] = None, - *, - environment: Optional[analyze.Environment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update the environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.UpdateEnvironmentRequest( - environment=environment, - ) - - # Make the request - operation = client.update_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): - The request object. Update environment request. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - - This corresponds to the ``environment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [environment, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, service.UpdateEnvironmentRequest): - request = service.UpdateEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if environment is not None: - request.environment = environment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("environment.name", request.environment.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyze.Environment, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_environment(self, - request: Optional[Union[service.DeleteEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): - The request object. Delete environment request. - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
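# A sketch of draining a lake's environments with the delete documented here,
# assuming none of them still has child resources (the docstring above makes
# that a precondition); the lake path is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
lake = "projects/my-project/locations/us-central1/lakes/my-lake"
for env in client.list_environments(parent=lake):
    # Each delete is its own LRO; wait for one before starting the next.
    client.delete_environment(name=env.name).result()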
- - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteEnvironmentRequest): - request = service.DeleteEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=service.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_environments(self, - request: Optional[Union[service.ListEnvironmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEnvironmentsPager: - r"""Lists environments under the given lake. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_environments(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEnvironmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_environments(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): - The request object. List environments request. - parent (str): - Required. 
The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: - List environments response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListEnvironmentsRequest): - request = service.ListEnvironmentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_environments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEnvironmentsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_environment(self, - request: Optional[Union[service.GetEnvironmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyze.Environment: - r"""Get environment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = client.get_environment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): - The request object. Get environment request. - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Environment: - Environment represents a user-visible - compute infrastructure for analytics - within a lake. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetEnvironmentRequest): - request = service.GetEnvironmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_environment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_sessions(self, - request: Optional[Union[service.ListSessionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsPager: - r"""Lists session resources in an environment. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_sessions(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListSessionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]): - The request object. List sessions request. - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager: - List sessions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListSessionsRequest): - request = service.ListSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSessionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def __enter__(self) -> "DataplexServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. 
- - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DataplexServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py deleted file mode 100644 index 718f88da504d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/pagers.py +++ /dev/null @@ -1,1420 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
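The pager classes that follow are not constructed directly; they are returned by the corresponding ``list_*`` methods on the service client and resolve additional pages lazily. A minimal usage sketch, assuming the ``google-cloud-dataplex`` package is installed and using a hypothetical parent resource name:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()

    request = dataplex_v1.ListLakesRequest(
        # Hypothetical project/location; substitute real values.
        parent="projects/my-project/locations/us-central1",
    )

    # Iterating the pager yields individual Lake resources and
    # transparently issues follow-up ListLakes requests as pages
    # are exhausted.
    for lake in client.list_lakes(request=request):
        print(lake.name)

    # The ``pages`` property yields one ListLakesResponse per page,
    # for callers that need page-level fields such as next_page_token.
    for page in client.list_lakes(request=request).pages:
        print(page.next_page_token)

The async pagers defined alongside behave the same way but are consumed with ``async for`` via ``DataplexServiceAsyncClient``. Because only the most recent response is retained, attribute lookups on a pager always reflect the last page fetched.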
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks - - -class ListLakesPager: - """A pager for iterating through ``list_lakes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``lakes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLakes`` requests and continue to iterate - through the ``lakes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListLakesResponse], - request: service.ListLakesRequest, - response: service.ListLakesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListLakesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListLakesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListLakesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Lake]: - for page in self.pages: - yield from page.lakes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakesAsyncPager: - """A pager for iterating through ``list_lakes`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListLakesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``lakes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLakes`` requests and continue to iterate - through the ``lakes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListLakesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListLakesResponse]], - request: service.ListLakesRequest, - response: service.ListLakesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListLakesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListLakesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListLakesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Lake]: - async def async_generator(): - async for page in self.pages: - for response in page.lakes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakeActionsPager: - """A pager for iterating through ``list_lake_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLakeActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListLakeActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListLakeActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLakeActionsAsyncPager: - """A pager for iterating through ``list_lake_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLakeActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListLakeActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListLakeActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListLakeActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZonesPager: - """A pager for iterating through ``list_zones`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``zones`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListZones`` requests and continue to iterate - through the ``zones`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListZonesResponse], - request: service.ListZonesRequest, - response: service.ListZonesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZonesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListZonesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListZonesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListZonesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Zone]: - for page in self.pages: - yield from page.zones - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZonesAsyncPager: - """A pager for iterating through ``list_zones`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListZonesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``zones`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListZones`` requests and continue to iterate - through the ``zones`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListZonesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListZonesResponse]], - request: service.ListZonesRequest, - response: service.ListZonesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZonesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListZonesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListZonesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListZonesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Zone]: - async def async_generator(): - async for page in self.pages: - for response in page.zones: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZoneActionsPager: - """A pager for iterating through ``list_zone_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListZoneActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListZoneActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListZoneActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListZoneActionsAsyncPager: - """A pager for iterating through ``list_zone_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListZoneActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListZoneActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListZoneActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListZoneActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListAssetsResponse], - request: service.ListAssetsRequest, - response: service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Asset]: - for page in self.pages: - yield from page.assets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsAsyncPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListAssetsResponse]], - request: service.ListAssetsRequest, - response: service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Asset]: - async def async_generator(): - async for page in self.pages: - for response in page.assets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetActionsPager: - """A pager for iterating through ``list_asset_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssetActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListActionsResponse], - request: service.ListAssetActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListAssetActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Action]: - for page in self.pages: - yield from page.actions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetActionsAsyncPager: - """A pager for iterating through ``list_asset_actions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListActionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``actions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssetActions`` requests and continue to iterate - through the ``actions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListActionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListActionsResponse]], - request: service.ListAssetActionsRequest, - response: service.ListActionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListAssetActionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListActionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListAssetActionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListActionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Action]: - async def async_generator(): - async for page in self.pages: - for response in page.actions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListTasksResponse], - request: service.ListTasksRequest, - response: service.ListTasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tasks.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListTasksResponse]], - request: service.ListTasksRequest, - response: service.ListTasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tasks.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListJobsResponse], - request: service.ListJobsRequest, - response: service.ListJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tasks.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListJobsResponse]], - request: service.ListJobsRequest, - response: service.ListJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tasks.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEnvironmentsPager: - """A pager for iterating through ``list_environments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``environments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEnvironments`` requests and continue to iterate - through the ``environments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
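As the docstrings note, ``__getattr__`` forwards to the most recently fetched response, so attribute reads on a pager reflect only the last page retrieved. Continuing the sketch above (``client`` and the task path are assumed)::

    pager = client.list_jobs(
        request=dataplex_v1.ListJobsRequest(
            parent="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",
        )
    )
    jobs = list(pager)            # drains every page
    print(pager.next_page_token)  # proxies to the final response, now empty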
- """ - def __init__(self, - method: Callable[..., service.ListEnvironmentsResponse], - request: service.ListEnvironmentsRequest, - response: service.ListEnvironmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListEnvironmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListEnvironmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Environment]: - for page in self.pages: - yield from page.environments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEnvironmentsAsyncPager: - """A pager for iterating through ``list_environments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``environments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEnvironments`` requests and continue to iterate - through the ``environments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListEnvironmentsResponse]], - request: service.ListEnvironmentsRequest, - response: service.ListEnvironmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListEnvironmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListEnvironmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyze.Environment]: - async def async_generator(): - async for page in self.pages: - for response in page.environments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListSessionsResponse], - request: service.ListSessionsRequest, - response: service.ListSessionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyze.Session]: - for page in self.pages: - yield from page.sessions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsAsyncPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListSessionsResponse]], - request: service.ListSessionsRequest, - response: service.ListSessionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyze.Session]: - async def async_generator(): - async for page in self.pages: - for response in page.sessions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst deleted file mode 100644 index a70e22115784..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataplexServiceTransport` is the ABC for all transports. -- public child `DataplexServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataplexServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataplexServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataplexServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py deleted file mode 100644 index e68c264bc640..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataplexServiceTransport -from .grpc import DataplexServiceGrpcTransport -from .grpc_asyncio import DataplexServiceGrpcAsyncIOTransport -from .rest import DataplexServiceRestTransport -from .rest import DataplexServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[DataplexServiceTransport]] -_transport_registry['grpc'] = DataplexServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataplexServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DataplexServiceRestTransport - -__all__ = ( - 'DataplexServiceTransport', - 'DataplexServiceGrpcTransport', - 'DataplexServiceGrpcAsyncIOTransport', - 'DataplexServiceRestTransport', - 'DataplexServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py deleted file mode 100644 index 3a63297e464e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py +++ /dev/null @@ -1,838 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataplexServiceTransport(abc.ABC): - """Abstract transport class for DataplexService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. 
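The registry above maps transport names to classes. GAPIC clients accept either the registered name or a transport instance, and the class can also be resolved explicitly; a hedged sketch, assuming the standard GAPIC client surface::

    from google.cloud import dataplex_v1

    # Select a registered transport by name...
    rest_client = dataplex_v1.DataplexServiceClient(transport="rest")

    # ...or resolve the class and construct the client from an instance.
    transport_cls = dataplex_v1.DataplexServiceClient.get_transport_class("grpc")
    grpc_client = dataplex_v1.DataplexServiceClient(transport=transport_cls())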
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_lake: gapic_v1.method.wrap_method( - self.create_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.update_lake: gapic_v1.method.wrap_method( - self.update_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_lake: gapic_v1.method.wrap_method( - self.delete_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.list_lakes: gapic_v1.method.wrap_method( - self.list_lakes, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_lake: gapic_v1.method.wrap_method( - self.get_lake, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_lake_actions: gapic_v1.method.wrap_method( - self.list_lake_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_zone: gapic_v1.method.wrap_method( - self.create_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.update_zone: gapic_v1.method.wrap_method( - self.update_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_zone: gapic_v1.method.wrap_method( - self.delete_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.list_zones: gapic_v1.method.wrap_method( - self.list_zones, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_zone: gapic_v1.method.wrap_method( - self.get_zone, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_zone_actions: gapic_v1.method.wrap_method( - self.list_zone_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_asset: gapic_v1.method.wrap_method( - self.create_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_asset: gapic_v1.method.wrap_method( - self.update_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_asset: gapic_v1.method.wrap_method( - self.delete_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: gapic_v1.method.wrap_method( - self.list_assets, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_asset: gapic_v1.method.wrap_method( - self.get_asset, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - 
), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_asset_actions: gapic_v1.method.wrap_method( - self.list_asset_actions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_task: gapic_v1.method.wrap_method( - self.create_task, - default_timeout=60.0, - client_info=client_info, - ), - self.update_task: gapic_v1.method.wrap_method( - self.update_task, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_task: gapic_v1.method.wrap_method( - self.delete_task, - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_task: gapic_v1.method.wrap_method( - self.run_task, - default_timeout=None, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_environment: gapic_v1.method.wrap_method( - self.create_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.update_environment: gapic_v1.method.wrap_method( - self.update_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_environment: gapic_v1.method.wrap_method( - self.delete_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.list_environments: gapic_v1.method.wrap_method( - self.list_environments, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_environment: gapic_v1.method.wrap_method( - self.get_environment, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_sessions: gapic_v1.method.wrap_method( - self.list_sessions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - 
self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - Union[ - service.ListLakesResponse, - Awaitable[service.ListLakesResponse] - ]]: - raise NotImplementedError() - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - Union[ - resources.Lake, - Awaitable[resources.Lake] - ]]: - raise NotImplementedError() - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - Union[ - service.ListZonesResponse, - Awaitable[service.ListZonesResponse] - ]]: - raise NotImplementedError() - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - Union[ - resources.Zone, - Awaitable[resources.Zone] - ]]: - raise NotImplementedError() - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - 
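The wrapped methods above fix the default call policies: a 60s timeout on most RPCs, and for the list/get methods an exponential backoff with 1.0s initial delay, 1.3 multiplier, 10s cap, and 60s overall deadline, retrying only ``ServiceUnavailable``. Both retry and timeout can be overridden per call; a hedged sketch, with ``client`` and ``request`` assumed from earlier sketches::

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # A tighter policy than the generated default, applied to one call only.
    custom_retry = retries.Retry(
        initial=0.25,
        maximum=5.0,
        multiplier=2.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=30.0,
    )
    response = client.list_lakes(request=request, retry=custom_retry, timeout=30.0)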
@property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - Union[ - service.ListAssetsResponse, - Awaitable[service.ListAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - Union[ - resources.Asset, - Awaitable[resources.Asset] - ]]: - raise NotImplementedError() - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - Union[ - service.ListActionsResponse, - Awaitable[service.ListActionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - Union[ - service.ListTasksResponse, - Awaitable[service.ListTasksResponse] - ]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - Union[ - tasks.Task, - Awaitable[tasks.Task] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - Union[ - service.ListJobsResponse, - Awaitable[service.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def run_task(self) -> Callable[ - [service.RunTaskRequest], - Union[ - service.RunTaskResponse, - Awaitable[service.RunTaskResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [service.GetJobRequest], - Union[ - tasks.Job, - Awaitable[tasks.Job] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [service.CancelJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - Union[ - service.ListEnvironmentsResponse, - Awaitable[service.ListEnvironmentsResponse] - ]]: - raise NotImplementedError() - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - Union[ - 
analyze.Environment, - Awaitable[analyze.Environment] - ]]: - raise NotImplementedError() - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - Union[ - service.ListSessionsResponse, - Awaitable[service.ListSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataplexServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py deleted file mode 100644 index 743313b14313..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py +++ /dev/null @@ -1,1323 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class 
DataplexServiceGrpcTransport(DataplexServiceTransport): - """gRPC backend transport for DataplexService. - - Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. 
It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the create lake method over gRPC. - - Creates a lake resource. - - Returns: - Callable[[~.CreateLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_lake' not in self._stubs: - self._stubs['create_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateLake', - request_serializer=service.CreateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_lake'] - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update lake method over gRPC. - - Updates a lake resource.
- - Returns: - Callable[[~.UpdateLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_lake' not in self._stubs: - self._stubs['update_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateLake', - request_serializer=service.UpdateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_lake'] - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete lake method over gRPC. - - Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - Returns: - Callable[[~.DeleteLakeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lake' not in self._stubs: - self._stubs['delete_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteLake', - request_serializer=service.DeleteLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_lake'] - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - service.ListLakesResponse]: - r"""Return a callable for the list lakes method over gRPC. - - Lists lake resources in a project and location. - - Returns: - Callable[[~.ListLakesRequest], - ~.ListLakesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lakes' not in self._stubs: - self._stubs['list_lakes'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakes', - request_serializer=service.ListLakesRequest.serialize, - response_deserializer=service.ListLakesResponse.deserialize, - ) - return self._stubs['list_lakes'] - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - resources.Lake]: - r"""Return a callable for the get lake method over gRPC. - - Retrieves a lake resource. - - Returns: - Callable[[~.GetLakeRequest], - ~.Lake]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_lake' not in self._stubs: - self._stubs['get_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetLake', - request_serializer=service.GetLakeRequest.serialize, - response_deserializer=resources.Lake.deserialize, - ) - return self._stubs['get_lake'] - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list lake actions method over gRPC. - - Lists action resources in a lake. 
- - Returns: - Callable[[~.ListLakeActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lake_actions' not in self._stubs: - self._stubs['list_lake_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', - request_serializer=service.ListLakeActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_lake_actions'] - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the create zone method over gRPC. - - Creates a zone resource within a lake. - - Returns: - Callable[[~.CreateZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_zone' not in self._stubs: - self._stubs['create_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateZone', - request_serializer=service.CreateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_zone'] - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the update zone method over gRPC. - - Updates a zone resource. - - Returns: - Callable[[~.UpdateZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_zone' not in self._stubs: - self._stubs['update_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateZone', - request_serializer=service.UpdateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_zone'] - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete zone method over gRPC. - - Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. - - Returns: - Callable[[~.DeleteZoneRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_zone' not in self._stubs: - self._stubs['delete_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteZone', - request_serializer=service.DeleteZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_zone'] - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - service.ListZonesResponse]: - r"""Return a callable for the list zones method over gRPC. 
- - Lists zone resources in a lake. - - Returns: - Callable[[~.ListZonesRequest], - ~.ListZonesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zones' not in self._stubs: - self._stubs['list_zones'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZones', - request_serializer=service.ListZonesRequest.serialize, - response_deserializer=service.ListZonesResponse.deserialize, - ) - return self._stubs['list_zones'] - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - resources.Zone]: - r"""Return a callable for the get zone method over gRPC. - - Retrieves a zone resource. - - Returns: - Callable[[~.GetZoneRequest], - ~.Zone]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_zone' not in self._stubs: - self._stubs['get_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetZone', - request_serializer=service.GetZoneRequest.serialize, - response_deserializer=resources.Zone.deserialize, - ) - return self._stubs['get_zone'] - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list zone actions method over gRPC. - - Lists action resources in a zone. - - Returns: - Callable[[~.ListZoneActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zone_actions' not in self._stubs: - self._stubs['list_zone_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', - request_serializer=service.ListZoneActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_zone_actions'] - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the create asset method over gRPC. - - Creates an asset resource. - - Returns: - Callable[[~.CreateAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_asset' not in self._stubs: - self._stubs['create_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateAsset', - request_serializer=service.CreateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_asset'] - - @property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the update asset method over gRPC. - - Updates an asset resource. 
- - Returns: - Callable[[~.UpdateAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_asset' not in self._stubs: - self._stubs['update_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', - request_serializer=service.UpdateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_asset'] - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete asset method over gRPC. - - Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - Returns: - Callable[[~.DeleteAssetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_asset' not in self._stubs: - self._stubs['delete_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', - request_serializer=service.DeleteAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_asset'] - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - service.ListAssetsResponse]: - r"""Return a callable for the list assets method over gRPC. - - Lists asset resources in a zone. - - Returns: - Callable[[~.ListAssetsRequest], - ~.ListAssetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssets', - request_serializer=service.ListAssetsRequest.serialize, - response_deserializer=service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - resources.Asset]: - r"""Return a callable for the get asset method over gRPC. - - Retrieves an asset resource. - - Returns: - Callable[[~.GetAssetRequest], - ~.Asset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_asset' not in self._stubs: - self._stubs['get_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetAsset', - request_serializer=service.GetAssetRequest.serialize, - response_deserializer=resources.Asset.deserialize, - ) - return self._stubs['get_asset'] - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - service.ListActionsResponse]: - r"""Return a callable for the list asset actions method over gRPC. 
- - Lists action resources in an asset. - - Returns: - Callable[[~.ListAssetActionsRequest], - ~.ListActionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_asset_actions' not in self._stubs: - self._stubs['list_asset_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', - request_serializer=service.ListAssetActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_asset_actions'] - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the create task method over gRPC. - - Creates a task resource within a lake. - - Returns: - Callable[[~.CreateTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateTask', - request_serializer=service.CreateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_task'] - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the update task method over gRPC. - - Update the task resource. - - Returns: - Callable[[~.UpdateTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_task' not in self._stubs: - self._stubs['update_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateTask', - request_serializer=service.UpdateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_task'] - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete task method over gRPC. - - Delete the task resource. - - Returns: - Callable[[~.DeleteTaskRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteTask', - request_serializer=service.DeleteTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_task'] - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - service.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. 
- - Lists tasks under the given lake. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListTasks', - request_serializer=service.ListTasksRequest.serialize, - response_deserializer=service.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - tasks.Task]: - r"""Return a callable for the get task method over gRPC. - - Get task resource. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetTask', - request_serializer=service.GetTaskRequest.serialize, - response_deserializer=tasks.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - service.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists Jobs under the given task. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListJobs', - request_serializer=service.ListJobsRequest.serialize, - response_deserializer=service.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def run_task(self) -> Callable[ - [service.RunTaskRequest], - service.RunTaskResponse]: - r"""Return a callable for the run task method over gRPC. - - Run an on demand execution of a Task. - - Returns: - Callable[[~.RunTaskRequest], - ~.RunTaskResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_task' not in self._stubs: - self._stubs['run_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/RunTask', - request_serializer=service.RunTaskRequest.serialize, - response_deserializer=service.RunTaskResponse.deserialize, - ) - return self._stubs['run_task'] - - @property - def get_job(self) -> Callable[ - [service.GetJobRequest], - tasks.Job]: - r"""Return a callable for the get job method over gRPC. - - Get job resource. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetJob', - request_serializer=service.GetJobRequest.serialize, - response_deserializer=tasks.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def cancel_job(self) -> Callable[ - [service.CancelJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel jobs running for the task resource. - - Returns: - Callable[[~.CancelJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CancelJob', - request_serializer=service.CancelJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_job'] - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - operations_pb2.Operation]: - r"""Return a callable for the create environment method over gRPC. - - Create an environment resource. - - Returns: - Callable[[~.CreateEnvironmentRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_environment' not in self._stubs: - self._stubs['create_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', - request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_environment'] - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - operations_pb2.Operation]: - r"""Return a callable for the update environment method over gRPC. - - Update the environment resource. - - Returns: - Callable[[~.UpdateEnvironmentRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_environment' not in self._stubs: - self._stubs['update_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', - request_serializer=service.UpdateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_environment'] - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete environment method over gRPC. - - Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. 
- - Returns: - Callable[[~.DeleteEnvironmentRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_environment' not in self._stubs: - self._stubs['delete_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', - request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_environment'] - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - service.ListEnvironmentsResponse]: - r"""Return a callable for the list environments method over gRPC. - - Lists environments under the given lake. - - Returns: - Callable[[~.ListEnvironmentsRequest], - ~.ListEnvironmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_environments' not in self._stubs: - self._stubs['list_environments'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', - request_serializer=service.ListEnvironmentsRequest.serialize, - response_deserializer=service.ListEnvironmentsResponse.deserialize, - ) - return self._stubs['list_environments'] - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - analyze.Environment]: - r"""Return a callable for the get environment method over gRPC. - - Get environment resource. - - Returns: - Callable[[~.GetEnvironmentRequest], - ~.Environment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_environment' not in self._stubs: - self._stubs['get_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', - request_serializer=service.GetEnvironmentRequest.serialize, - response_deserializer=analyze.Environment.deserialize, - ) - return self._stubs['get_environment'] - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - service.ListSessionsResponse]: - r"""Return a callable for the list sessions method over gRPC. - - Lists session resources in an environment. - - Returns: - Callable[[~.ListSessionsRequest], - ~.ListSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListSessions', - request_serializer=service.ListSessionsRequest.serialize, - response_deserializer=service.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
# gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the get location method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataplexServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py deleted file mode 100644 index bc8ec6359a6d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1669 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
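Editor's note: before the AsyncIO variant's imports begin below, here is a minimal usage sketch of the synchronous transport deleted above. It is illustrative only and not part of the diff; it assumes application default credentials are available at runtime, and the `parent` value is a placeholder.

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.dataplex_service.transports import (
    DataplexServiceGrpcTransport,
)

# With no arguments, the transport builds its own channel from application
# default credentials, exactly as the __init__ shown above does.
transport = DataplexServiceGrpcTransport()
client = dataplex_v1.DataplexServiceClient(transport=transport)

# The stub properties can also be invoked directly; each property lazily
# creates a unary-unary callable on first access and caches it in _stubs.
response = transport.list_lakes(
    dataplex_v1.ListLakesRequest(parent="projects/my-project/locations/us-central1")
)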
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataplexServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `<Tuple[str, str]>` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { -
"serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataplexServiceGrpcAsyncIOTransport(DataplexServiceTransport): - """gRPC AsyncIO backend transport for DataplexService. - - Dataplex service provides data lakes as a service. The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it.
- self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create lake method over gRPC. - - Creates a lake resource. - - Returns: - Callable[[~.CreateLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_lake' not in self._stubs: - self._stubs['create_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateLake', - request_serializer=service.CreateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_lake'] - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update lake method over gRPC. - - Updates a lake resource. - - Returns: - Callable[[~.UpdateLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_lake' not in self._stubs: - self._stubs['update_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateLake', - request_serializer=service.UpdateLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_lake'] - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete lake method over gRPC. - - Deletes a lake resource. All zones within the lake - must be deleted before the lake can be deleted. - - Returns: - Callable[[~.DeleteLakeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lake' not in self._stubs: - self._stubs['delete_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteLake', - request_serializer=service.DeleteLakeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_lake'] - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - Awaitable[service.ListLakesResponse]]: - r"""Return a callable for the list lakes method over gRPC. - - Lists lake resources in a project and location. - - Returns: - Callable[[~.ListLakesRequest], - Awaitable[~.ListLakesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lakes' not in self._stubs: - self._stubs['list_lakes'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakes', - request_serializer=service.ListLakesRequest.serialize, - response_deserializer=service.ListLakesResponse.deserialize, - ) - return self._stubs['list_lakes'] - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - Awaitable[resources.Lake]]: - r"""Return a callable for the get lake method over gRPC. - - Retrieves a lake resource. - - Returns: - Callable[[~.GetLakeRequest], - Awaitable[~.Lake]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_lake' not in self._stubs: - self._stubs['get_lake'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetLake', - request_serializer=service.GetLakeRequest.serialize, - response_deserializer=resources.Lake.deserialize, - ) - return self._stubs['get_lake'] - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list lake actions method over gRPC. - - Lists action resources in a lake. - - Returns: - Callable[[~.ListLakeActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_lake_actions' not in self._stubs: - self._stubs['list_lake_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListLakeActions', - request_serializer=service.ListLakeActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_lake_actions'] - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create zone method over gRPC. - - Creates a zone resource within a lake. - - Returns: - Callable[[~.CreateZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_zone' not in self._stubs: - self._stubs['create_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateZone', - request_serializer=service.CreateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_zone'] - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update zone method over gRPC. - - Updates a zone resource. - - Returns: - Callable[[~.UpdateZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_zone' not in self._stubs: - self._stubs['update_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateZone', - request_serializer=service.UpdateZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_zone'] - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete zone method over gRPC. - - Deletes a zone resource. All assets within a zone - must be deleted before the zone can be deleted. 
- - Returns: - Callable[[~.DeleteZoneRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_zone' not in self._stubs: - self._stubs['delete_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteZone', - request_serializer=service.DeleteZoneRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_zone'] - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - Awaitable[service.ListZonesResponse]]: - r"""Return a callable for the list zones method over gRPC. - - Lists zone resources in a lake. - - Returns: - Callable[[~.ListZonesRequest], - Awaitable[~.ListZonesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zones' not in self._stubs: - self._stubs['list_zones'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZones', - request_serializer=service.ListZonesRequest.serialize, - response_deserializer=service.ListZonesResponse.deserialize, - ) - return self._stubs['list_zones'] - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - Awaitable[resources.Zone]]: - r"""Return a callable for the get zone method over gRPC. - - Retrieves a zone resource. - - Returns: - Callable[[~.GetZoneRequest], - Awaitable[~.Zone]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_zone' not in self._stubs: - self._stubs['get_zone'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetZone', - request_serializer=service.GetZoneRequest.serialize, - response_deserializer=resources.Zone.deserialize, - ) - return self._stubs['get_zone'] - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list zone actions method over gRPC. - - Lists action resources in a zone. - - Returns: - Callable[[~.ListZoneActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_zone_actions' not in self._stubs: - self._stubs['list_zone_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListZoneActions', - request_serializer=service.ListZoneActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_zone_actions'] - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create asset method over gRPC. 
- - Creates an asset resource. - - Returns: - Callable[[~.CreateAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_asset' not in self._stubs: - self._stubs['create_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateAsset', - request_serializer=service.CreateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_asset'] - - @property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update asset method over gRPC. - - Updates an asset resource. - - Returns: - Callable[[~.UpdateAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_asset' not in self._stubs: - self._stubs['update_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateAsset', - request_serializer=service.UpdateAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_asset'] - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete asset method over gRPC. - - Deletes an asset resource. The referenced storage - resource is detached (default) or deleted based on the - associated Lifecycle policy. - - Returns: - Callable[[~.DeleteAssetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_asset' not in self._stubs: - self._stubs['delete_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteAsset', - request_serializer=service.DeleteAssetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_asset'] - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - Awaitable[service.ListAssetsResponse]]: - r"""Return a callable for the list assets method over gRPC. - - Lists asset resources in a zone. - - Returns: - Callable[[~.ListAssetsRequest], - Awaitable[~.ListAssetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssets', - request_serializer=service.ListAssetsRequest.serialize, - response_deserializer=service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - Awaitable[resources.Asset]]: - r"""Return a callable for the get asset method over gRPC. - - Retrieves an asset resource. - - Returns: - Callable[[~.GetAssetRequest], - Awaitable[~.Asset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_asset' not in self._stubs: - self._stubs['get_asset'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetAsset', - request_serializer=service.GetAssetRequest.serialize, - response_deserializer=resources.Asset.deserialize, - ) - return self._stubs['get_asset'] - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - Awaitable[service.ListActionsResponse]]: - r"""Return a callable for the list asset actions method over gRPC. - - Lists action resources in an asset. - - Returns: - Callable[[~.ListAssetActionsRequest], - Awaitable[~.ListActionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_asset_actions' not in self._stubs: - self._stubs['list_asset_actions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListAssetActions', - request_serializer=service.ListAssetActionsRequest.serialize, - response_deserializer=service.ListActionsResponse.deserialize, - ) - return self._stubs['list_asset_actions'] - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create task method over gRPC. - - Creates a task resource within a lake. - - Returns: - Callable[[~.CreateTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateTask', - request_serializer=service.CreateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_task'] - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update task method over gRPC. - - Update the task resource. - - Returns: - Callable[[~.UpdateTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_task' not in self._stubs: - self._stubs['update_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateTask', - request_serializer=service.UpdateTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_task'] - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete task method over gRPC. - - Delete the task resource. - - Returns: - Callable[[~.DeleteTaskRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteTask', - request_serializer=service.DeleteTaskRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_task'] - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - Awaitable[service.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - Lists tasks under the given lake. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListTasks', - request_serializer=service.ListTasksRequest.serialize, - response_deserializer=service.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - Awaitable[tasks.Task]]: - r"""Return a callable for the get task method over gRPC. - - Get task resource. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetTask', - request_serializer=service.GetTaskRequest.serialize, - response_deserializer=tasks.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - Awaitable[service.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists Jobs under the given task. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListJobs', - request_serializer=service.ListJobsRequest.serialize, - response_deserializer=service.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def run_task(self) -> Callable[ - [service.RunTaskRequest], - Awaitable[service.RunTaskResponse]]: - r"""Return a callable for the run task method over gRPC. - - Run an on demand execution of a Task. - - Returns: - Callable[[~.RunTaskRequest], - Awaitable[~.RunTaskResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_task' not in self._stubs: - self._stubs['run_task'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/RunTask', - request_serializer=service.RunTaskRequest.serialize, - response_deserializer=service.RunTaskResponse.deserialize, - ) - return self._stubs['run_task'] - - @property - def get_job(self) -> Callable[ - [service.GetJobRequest], - Awaitable[tasks.Job]]: - r"""Return a callable for the get job method over gRPC. - - Get job resource. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetJob', - request_serializer=service.GetJobRequest.serialize, - response_deserializer=tasks.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def cancel_job(self) -> Callable[ - [service.CancelJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel jobs running for the task resource. - - Returns: - Callable[[~.CancelJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CancelJob', - request_serializer=service.CancelJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_job'] - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create environment method over gRPC. - - Create an environment resource. - - Returns: - Callable[[~.CreateEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_environment' not in self._stubs: - self._stubs['create_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/CreateEnvironment', - request_serializer=service.CreateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_environment'] - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update environment method over gRPC. - - Update the environment resource. - - Returns: - Callable[[~.UpdateEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_environment' not in self._stubs: - self._stubs['update_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment', - request_serializer=service.UpdateEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_environment'] - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete environment method over gRPC. - - Delete the environment resource. All the child - resources must have been deleted before environment - deletion can be initiated. - - Returns: - Callable[[~.DeleteEnvironmentRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_environment' not in self._stubs: - self._stubs['delete_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment', - request_serializer=service.DeleteEnvironmentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_environment'] - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - Awaitable[service.ListEnvironmentsResponse]]: - r"""Return a callable for the list environments method over gRPC. - - Lists environments under the given lake. - - Returns: - Callable[[~.ListEnvironmentsRequest], - Awaitable[~.ListEnvironmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_environments' not in self._stubs: - self._stubs['list_environments'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListEnvironments', - request_serializer=service.ListEnvironmentsRequest.serialize, - response_deserializer=service.ListEnvironmentsResponse.deserialize, - ) - return self._stubs['list_environments'] - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - Awaitable[analyze.Environment]]: - r"""Return a callable for the get environment method over gRPC. - - Get environment resource. - - Returns: - Callable[[~.GetEnvironmentRequest], - Awaitable[~.Environment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_environment' not in self._stubs: - self._stubs['get_environment'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/GetEnvironment', - request_serializer=service.GetEnvironmentRequest.serialize, - response_deserializer=analyze.Environment.deserialize, - ) - return self._stubs['get_environment'] - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - Awaitable[service.ListSessionsResponse]]: - r"""Return a callable for the list sessions method over gRPC. - - Lists session resources in an environment. - - Returns: - Callable[[~.ListSessionsRequest], - Awaitable[~.ListSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.DataplexService/ListSessions', - request_serializer=service.ListSessionsRequest.serialize, - response_deserializer=service.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_lake: self._wrap_method( - self.create_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.update_lake: self._wrap_method( - self.update_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_lake: self._wrap_method( - self.delete_lake, - default_timeout=60.0, - client_info=client_info, - ), - self.list_lakes: self._wrap_method( - self.list_lakes, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_lake: self._wrap_method( - self.get_lake, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_lake_actions: self._wrap_method( - self.list_lake_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_zone: self._wrap_method( - self.create_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.update_zone: self._wrap_method( - self.update_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_zone: self._wrap_method( - self.delete_zone, - default_timeout=60.0, - client_info=client_info, - ), - self.list_zones: self._wrap_method( - self.list_zones, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_zone: self._wrap_method( - self.get_zone, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_zone_actions: self._wrap_method( - self.list_zone_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_asset: self._wrap_method( - self.create_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.update_asset: self._wrap_method( - self.update_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_asset: self._wrap_method( - self.delete_asset, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: self._wrap_method( - self.list_assets, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_asset: self._wrap_method( - self.get_asset, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_asset_actions: self._wrap_method( - self.list_asset_actions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_task: self._wrap_method( - self.create_task, - default_timeout=60.0, - client_info=client_info, - ), - self.update_task: self._wrap_method( - self.update_task, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_task: self._wrap_method( - self.delete_task, - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: self._wrap_method( - self.list_tasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: self._wrap_method( - self.get_task, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: self._wrap_method( - self.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.run_task: self._wrap_method( - self.run_task, - default_timeout=None, - client_info=client_info, - ), - self.get_job: self._wrap_method( - self.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: self._wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_environment: self._wrap_method( - self.create_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.update_environment: self._wrap_method( - self.update_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_environment: self._wrap_method( - self.delete_environment, - default_timeout=60.0, - client_info=client_info, - ), - self.list_environments: self._wrap_method( - self.list_environments, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_environment: self._wrap_method( - self.get_environment, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), 
- self.list_sessions: self._wrap_method( - self.list_sessions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataplexServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py deleted file mode 100644 index cb83f1bbf4d0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py +++ /dev/null @@ -1,6707 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseDataplexServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DataplexServiceRestInterceptor: - """Interceptor for DataplexService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DataplexServiceRestTransport. - - .. 
code-block:: python - class MyCustomDataplexServiceInterceptor(DataplexServiceRestInterceptor): - def pre_cancel_job(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - -
def pre_create_asset(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_asset(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_create_environment(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_environment(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_create_lake(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_lake(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_create_task(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_task(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_create_zone(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_zone(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_delete_asset(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_asset(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_delete_environment(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_environment(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_delete_lake(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_lake(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_delete_task(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_task(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_delete_zone(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_zone(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_asset(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_asset(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_environment(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_environment(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_job(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_job(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_lake(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_lake(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_task(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_task(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_get_zone(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_zone(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_asset_actions(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_asset_actions(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_assets(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_assets(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_environments(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_environments(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_jobs(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_jobs(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_lake_actions(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_lake_actions(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_lakes(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_lakes(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_sessions(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_sessions(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_tasks(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_tasks(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_zone_actions(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_zone_actions(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_list_zones(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_zones(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_run_task(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_run_task(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_update_asset(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_update_asset(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_update_environment(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_update_environment(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_update_lake(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_update_lake(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_update_task(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_update_task(self, response): - logging.info(f"Received response: {response}") - return response - -
def pre_update_zone(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_update_zone(self, response): - logging.info(f"Received response: {response}") - return response - -
transport = DataplexServiceRestTransport(interceptor=MyCustomDataplexServiceInterceptor()) - client = DataplexServiceClient(transport=transport) - - - """ - def pre_cancel_job(self, request: service.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - -
def pre_create_asset(self, request: service.CreateAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_asset - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - -
def post_create_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_asset - - DEPRECATED. Please use the `post_create_asset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_create_asset` interceptor runs - before the `post_create_asset_with_metadata` interceptor. - """ - return response - -
def post_create_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_asset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_create_asset_with_metadata` - interceptor in new development instead of the `post_create_asset` interceptor. - When both interceptors are used, this `post_create_asset_with_metadata` interceptor runs after the - `post_create_asset` interceptor. The (possibly modified) response returned by - `post_create_asset` will be passed to - `post_create_asset_with_metadata`. - """ - return response, metadata - -
def pre_create_environment(self, request: service.CreateEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_environment - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - -
def post_create_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_environment - - DEPRECATED. Please use the `post_create_environment_with_metadata` - interceptor instead.
- - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_create_environment` interceptor runs - before the `post_create_environment_with_metadata` interceptor. - """ - return response - - def post_create_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_environment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_create_environment_with_metadata` - interceptor in new development instead of the `post_create_environment` interceptor. - When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the - `post_create_environment` interceptor. The (possibly modified) response returned by - `post_create_environment` will be passed to - `post_create_environment_with_metadata`. - """ - return response, metadata - - def pre_create_lake(self, request: service.CreateLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_lake - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_create_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_lake - - DEPRECATED. Please use the `post_create_lake_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_create_lake` interceptor runs - before the `post_create_lake_with_metadata` interceptor. - """ - return response - - def post_create_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_lake - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_create_lake_with_metadata` - interceptor in new development instead of the `post_create_lake` interceptor. - When both interceptors are used, this `post_create_lake_with_metadata` interceptor runs after the - `post_create_lake` interceptor. The (possibly modified) response returned by - `post_create_lake` will be passed to - `post_create_lake_with_metadata`. - """ - return response, metadata - - def pre_create_task(self, request: service.CreateTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_create_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_task - - DEPRECATED. 
Please use the `post_create_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_create_task` interceptor runs - before the `post_create_task_with_metadata` interceptor. - """ - return response - - def post_create_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_create_task_with_metadata` - interceptor in new development instead of the `post_create_task` interceptor. - When both interceptors are used, this `post_create_task_with_metadata` interceptor runs after the - `post_create_task` interceptor. The (possibly modified) response returned by - `post_create_task` will be passed to - `post_create_task_with_metadata`. - """ - return response, metadata - - def pre_create_zone(self, request: service.CreateZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_zone - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_create_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_zone - - DEPRECATED. Please use the `post_create_zone_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_create_zone` interceptor runs - before the `post_create_zone_with_metadata` interceptor. - """ - return response - - def post_create_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_zone - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_create_zone_with_metadata` - interceptor in new development instead of the `post_create_zone` interceptor. - When both interceptors are used, this `post_create_zone_with_metadata` interceptor runs after the - `post_create_zone` interceptor. The (possibly modified) response returned by - `post_create_zone` will be passed to - `post_create_zone_with_metadata`. - """ - return response, metadata - - def pre_delete_asset(self, request: service.DeleteAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_asset - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_delete_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_asset - - DEPRECATED. 
Please use the `post_delete_asset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_delete_asset` interceptor runs - before the `post_delete_asset_with_metadata` interceptor. - """ - return response - - def post_delete_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_asset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_delete_asset_with_metadata` - interceptor in new development instead of the `post_delete_asset` interceptor. - When both interceptors are used, this `post_delete_asset_with_metadata` interceptor runs after the - `post_delete_asset` interceptor. The (possibly modified) response returned by - `post_delete_asset` will be passed to - `post_delete_asset_with_metadata`. - """ - return response, metadata - - def pre_delete_environment(self, request: service.DeleteEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_environment - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_delete_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_environment - - DEPRECATED. Please use the `post_delete_environment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_delete_environment` interceptor runs - before the `post_delete_environment_with_metadata` interceptor. - """ - return response - - def post_delete_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_environment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_delete_environment_with_metadata` - interceptor in new development instead of the `post_delete_environment` interceptor. - When both interceptors are used, this `post_delete_environment_with_metadata` interceptor runs after the - `post_delete_environment` interceptor. The (possibly modified) response returned by - `post_delete_environment` will be passed to - `post_delete_environment_with_metadata`. - """ - return response, metadata - - def pre_delete_lake(self, request: service.DeleteLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_lake - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. 
- """ - return request, metadata - - def post_delete_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_lake - - DEPRECATED. Please use the `post_delete_lake_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_delete_lake` interceptor runs - before the `post_delete_lake_with_metadata` interceptor. - """ - return response - - def post_delete_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_lake - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_delete_lake_with_metadata` - interceptor in new development instead of the `post_delete_lake` interceptor. - When both interceptors are used, this `post_delete_lake_with_metadata` interceptor runs after the - `post_delete_lake` interceptor. The (possibly modified) response returned by - `post_delete_lake` will be passed to - `post_delete_lake_with_metadata`. - """ - return response, metadata - - def pre_delete_task(self, request: service.DeleteTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_delete_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_task - - DEPRECATED. Please use the `post_delete_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_delete_task` interceptor runs - before the `post_delete_task_with_metadata` interceptor. - """ - return response - - def post_delete_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_delete_task_with_metadata` - interceptor in new development instead of the `post_delete_task` interceptor. - When both interceptors are used, this `post_delete_task_with_metadata` interceptor runs after the - `post_delete_task` interceptor. The (possibly modified) response returned by - `post_delete_task` will be passed to - `post_delete_task_with_metadata`. - """ - return response, metadata - - def pre_delete_zone(self, request: service.DeleteZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_zone - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. 
- """ - return request, metadata - - def post_delete_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_zone - - DEPRECATED. Please use the `post_delete_zone_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_delete_zone` interceptor runs - before the `post_delete_zone_with_metadata` interceptor. - """ - return response - - def post_delete_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_zone - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_delete_zone_with_metadata` - interceptor in new development instead of the `post_delete_zone` interceptor. - When both interceptors are used, this `post_delete_zone_with_metadata` interceptor runs after the - `post_delete_zone` interceptor. The (possibly modified) response returned by - `post_delete_zone` will be passed to - `post_delete_zone_with_metadata`. - """ - return response, metadata - - def pre_get_asset(self, request: service.GetAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_asset - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_asset(self, response: resources.Asset) -> resources.Asset: - """Post-rpc interceptor for get_asset - - DEPRECATED. Please use the `post_get_asset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_get_asset` interceptor runs - before the `post_get_asset_with_metadata` interceptor. - """ - return response - - def post_get_asset_with_metadata(self, response: resources.Asset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Asset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_asset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_asset_with_metadata` - interceptor in new development instead of the `post_get_asset` interceptor. - When both interceptors are used, this `post_get_asset_with_metadata` interceptor runs after the - `post_get_asset` interceptor. The (possibly modified) response returned by - `post_get_asset` will be passed to - `post_get_asset_with_metadata`. - """ - return response, metadata - - def pre_get_environment(self, request: service.GetEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_environment - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. 
- """ - return request, metadata - - def post_get_environment(self, response: analyze.Environment) -> analyze.Environment: - """Post-rpc interceptor for get_environment - - DEPRECATED. Please use the `post_get_environment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_get_environment` interceptor runs - before the `post_get_environment_with_metadata` interceptor. - """ - return response - - def post_get_environment_with_metadata(self, response: analyze.Environment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[analyze.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_environment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_environment_with_metadata` - interceptor in new development instead of the `post_get_environment` interceptor. - When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the - `post_get_environment` interceptor. The (possibly modified) response returned by - `post_get_environment` will be passed to - `post_get_environment_with_metadata`. - """ - return response, metadata - - def pre_get_job(self, request: service.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_job(self, response: tasks.Job) -> tasks.Job: - """Post-rpc interceptor for get_job - - DEPRECATED. Please use the `post_get_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_get_job` interceptor runs - before the `post_get_job_with_metadata` interceptor. - """ - return response - - def post_get_job_with_metadata(self, response: tasks.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[tasks.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_job_with_metadata` - interceptor in new development instead of the `post_get_job` interceptor. - When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the - `post_get_job` interceptor. The (possibly modified) response returned by - `post_get_job` will be passed to - `post_get_job_with_metadata`. - """ - return response, metadata - - def pre_get_lake(self, request: service.GetLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_lake - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_lake(self, response: resources.Lake) -> resources.Lake: - """Post-rpc interceptor for get_lake - - DEPRECATED. 
Please use the `post_get_lake_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_get_lake` interceptor runs - before the `post_get_lake_with_metadata` interceptor. - """ - return response - - def post_get_lake_with_metadata(self, response: resources.Lake, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Lake, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_lake - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_lake_with_metadata` - interceptor in new development instead of the `post_get_lake` interceptor. - When both interceptors are used, this `post_get_lake_with_metadata` interceptor runs after the - `post_get_lake` interceptor. The (possibly modified) response returned by - `post_get_lake` will be passed to - `post_get_lake_with_metadata`. - """ - return response, metadata - - def pre_get_task(self, request: service.GetTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_task(self, response: tasks.Task) -> tasks.Task: - """Post-rpc interceptor for get_task - - DEPRECATED. Please use the `post_get_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_get_task` interceptor runs - before the `post_get_task_with_metadata` interceptor. - """ - return response - - def post_get_task_with_metadata(self, response: tasks.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[tasks.Task, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_task_with_metadata` - interceptor in new development instead of the `post_get_task` interceptor. - When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the - `post_get_task` interceptor. The (possibly modified) response returned by - `post_get_task` will be passed to - `post_get_task_with_metadata`. - """ - return response, metadata - - def pre_get_zone(self, request: service.GetZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_zone - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_zone(self, response: resources.Zone) -> resources.Zone: - """Post-rpc interceptor for get_zone - - DEPRECATED. Please use the `post_get_zone_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. 
This `post_get_zone` interceptor runs - before the `post_get_zone_with_metadata` interceptor. - """ - return response - - def post_get_zone_with_metadata(self, response: resources.Zone, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resources.Zone, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_zone - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_get_zone_with_metadata` - interceptor in new development instead of the `post_get_zone` interceptor. - When both interceptors are used, this `post_get_zone_with_metadata` interceptor runs after the - `post_get_zone` interceptor. The (possibly modified) response returned by - `post_get_zone` will be passed to - `post_get_zone_with_metadata`. - """ - return response, metadata - - def pre_list_asset_actions(self, request: service.ListAssetActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_asset_actions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_asset_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: - """Post-rpc interceptor for list_asset_actions - - DEPRECATED. Please use the `post_list_asset_actions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_asset_actions` interceptor runs - before the `post_list_asset_actions_with_metadata` interceptor. - """ - return response - - def post_list_asset_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_asset_actions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_asset_actions_with_metadata` - interceptor in new development instead of the `post_list_asset_actions` interceptor. - When both interceptors are used, this `post_list_asset_actions_with_metadata` interceptor runs after the - `post_list_asset_actions` interceptor. The (possibly modified) response returned by - `post_list_asset_actions` will be passed to - `post_list_asset_actions_with_metadata`. - """ - return response, metadata - - def pre_list_assets(self, request: service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_assets(self, response: service.ListAssetsResponse) -> service.ListAssetsResponse: - """Post-rpc interceptor for list_assets - - DEPRECATED. Please use the `post_list_assets_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_assets` interceptor runs - before the `post_list_assets_with_metadata` interceptor. - """ - return response - - def post_list_assets_with_metadata(self, response: service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_assets_with_metadata` - interceptor in new development instead of the `post_list_assets` interceptor. - When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the - `post_list_assets` interceptor. The (possibly modified) response returned by - `post_list_assets` will be passed to - `post_list_assets_with_metadata`. - """ - return response, metadata - - def pre_list_environments(self, request: service.ListEnvironmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListEnvironmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_environments - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_environments(self, response: service.ListEnvironmentsResponse) -> service.ListEnvironmentsResponse: - """Post-rpc interceptor for list_environments - - DEPRECATED. Please use the `post_list_environments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_environments` interceptor runs - before the `post_list_environments_with_metadata` interceptor. - """ - return response - - def post_list_environments_with_metadata(self, response: service.ListEnvironmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_environments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_environments_with_metadata` - interceptor in new development instead of the `post_list_environments` interceptor. - When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the - `post_list_environments` interceptor. The (possibly modified) response returned by - `post_list_environments` will be passed to - `post_list_environments_with_metadata`. - """ - return response, metadata - - def pre_list_jobs(self, request: service.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_jobs(self, response: service.ListJobsResponse) -> service.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - DEPRECATED. 
Please use the `post_list_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_jobs` interceptor runs - before the `post_list_jobs_with_metadata` interceptor. - """ - return response - - def post_list_jobs_with_metadata(self, response: service.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_jobs_with_metadata` - interceptor in new development instead of the `post_list_jobs` interceptor. - When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the - `post_list_jobs` interceptor. The (possibly modified) response returned by - `post_list_jobs` will be passed to - `post_list_jobs_with_metadata`. - """ - return response, metadata - - def pre_list_lake_actions(self, request: service.ListLakeActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakeActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_lake_actions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_lake_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: - """Post-rpc interceptor for list_lake_actions - - DEPRECATED. Please use the `post_list_lake_actions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_lake_actions` interceptor runs - before the `post_list_lake_actions_with_metadata` interceptor. - """ - return response - - def post_list_lake_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_lake_actions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_lake_actions_with_metadata` - interceptor in new development instead of the `post_list_lake_actions` interceptor. - When both interceptors are used, this `post_list_lake_actions_with_metadata` interceptor runs after the - `post_list_lake_actions` interceptor. The (possibly modified) response returned by - `post_list_lake_actions` will be passed to - `post_list_lake_actions_with_metadata`. - """ - return response, metadata - - def pre_list_lakes(self, request: service.ListLakesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_lakes - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. 
- """ - return request, metadata - - def post_list_lakes(self, response: service.ListLakesResponse) -> service.ListLakesResponse: - """Post-rpc interceptor for list_lakes - - DEPRECATED. Please use the `post_list_lakes_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_lakes` interceptor runs - before the `post_list_lakes_with_metadata` interceptor. - """ - return response - - def post_list_lakes_with_metadata(self, response: service.ListLakesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListLakesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_lakes - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_lakes_with_metadata` - interceptor in new development instead of the `post_list_lakes` interceptor. - When both interceptors are used, this `post_list_lakes_with_metadata` interceptor runs after the - `post_list_lakes` interceptor. The (possibly modified) response returned by - `post_list_lakes` will be passed to - `post_list_lakes_with_metadata`. - """ - return response, metadata - - def pre_list_sessions(self, request: service.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_sessions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_sessions(self, response: service.ListSessionsResponse) -> service.ListSessionsResponse: - """Post-rpc interceptor for list_sessions - - DEPRECATED. Please use the `post_list_sessions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_sessions` interceptor runs - before the `post_list_sessions_with_metadata` interceptor. - """ - return response - - def post_list_sessions_with_metadata(self, response: service.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_sessions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_sessions_with_metadata` - interceptor in new development instead of the `post_list_sessions` interceptor. - When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the - `post_list_sessions` interceptor. The (possibly modified) response returned by - `post_list_sessions` will be passed to - `post_list_sessions_with_metadata`. - """ - return response, metadata - - def pre_list_tasks(self, request: service.ListTasksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_tasks - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. 
- """ - return request, metadata - - def post_list_tasks(self, response: service.ListTasksResponse) -> service.ListTasksResponse: - """Post-rpc interceptor for list_tasks - - DEPRECATED. Please use the `post_list_tasks_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_tasks` interceptor runs - before the `post_list_tasks_with_metadata` interceptor. - """ - return response - - def post_list_tasks_with_metadata(self, response: service.ListTasksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_tasks - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_tasks_with_metadata` - interceptor in new development instead of the `post_list_tasks` interceptor. - When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the - `post_list_tasks` interceptor. The (possibly modified) response returned by - `post_list_tasks` will be passed to - `post_list_tasks_with_metadata`. - """ - return response, metadata - - def pre_list_zone_actions(self, request: service.ListZoneActionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZoneActionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_zone_actions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_zone_actions(self, response: service.ListActionsResponse) -> service.ListActionsResponse: - """Post-rpc interceptor for list_zone_actions - - DEPRECATED. Please use the `post_list_zone_actions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_zone_actions` interceptor runs - before the `post_list_zone_actions_with_metadata` interceptor. - """ - return response - - def post_list_zone_actions_with_metadata(self, response: service.ListActionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListActionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_zone_actions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_zone_actions_with_metadata` - interceptor in new development instead of the `post_list_zone_actions` interceptor. - When both interceptors are used, this `post_list_zone_actions_with_metadata` interceptor runs after the - `post_list_zone_actions` interceptor. The (possibly modified) response returned by - `post_list_zone_actions` will be passed to - `post_list_zone_actions_with_metadata`. 
- """ - return response, metadata - - def pre_list_zones(self, request: service.ListZonesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZonesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_zones - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_zones(self, response: service.ListZonesResponse) -> service.ListZonesResponse: - """Post-rpc interceptor for list_zones - - DEPRECATED. Please use the `post_list_zones_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_list_zones` interceptor runs - before the `post_list_zones_with_metadata` interceptor. - """ - return response - - def post_list_zones_with_metadata(self, response: service.ListZonesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListZonesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_zones - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_list_zones_with_metadata` - interceptor in new development instead of the `post_list_zones` interceptor. - When both interceptors are used, this `post_list_zones_with_metadata` interceptor runs after the - `post_list_zones` interceptor. The (possibly modified) response returned by - `post_list_zones` will be passed to - `post_list_zones_with_metadata`. - """ - return response, metadata - - def pre_run_task(self, request: service.RunTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.RunTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for run_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_run_task(self, response: service.RunTaskResponse) -> service.RunTaskResponse: - """Post-rpc interceptor for run_task - - DEPRECATED. Please use the `post_run_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_run_task` interceptor runs - before the `post_run_task_with_metadata` interceptor. - """ - return response - - def post_run_task_with_metadata(self, response: service.RunTaskResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.RunTaskResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for run_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_run_task_with_metadata` - interceptor in new development instead of the `post_run_task` interceptor. - When both interceptors are used, this `post_run_task_with_metadata` interceptor runs after the - `post_run_task` interceptor. The (possibly modified) response returned by - `post_run_task` will be passed to - `post_run_task_with_metadata`. 
- """ - return response, metadata - - def pre_update_asset(self, request: service.UpdateAssetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateAssetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_asset - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_update_asset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_asset - - DEPRECATED. Please use the `post_update_asset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_update_asset` interceptor runs - before the `post_update_asset_with_metadata` interceptor. - """ - return response - - def post_update_asset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_asset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_update_asset_with_metadata` - interceptor in new development instead of the `post_update_asset` interceptor. - When both interceptors are used, this `post_update_asset_with_metadata` interceptor runs after the - `post_update_asset` interceptor. The (possibly modified) response returned by - `post_update_asset` will be passed to - `post_update_asset_with_metadata`. - """ - return response, metadata - - def pre_update_environment(self, request: service.UpdateEnvironmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateEnvironmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_environment - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_update_environment(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_environment - - DEPRECATED. Please use the `post_update_environment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_update_environment` interceptor runs - before the `post_update_environment_with_metadata` interceptor. - """ - return response - - def post_update_environment_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_environment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_update_environment_with_metadata` - interceptor in new development instead of the `post_update_environment` interceptor. - When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the - `post_update_environment` interceptor. 
The (possibly modified) response returned by - `post_update_environment` will be passed to - `post_update_environment_with_metadata`. - """ - return response, metadata - - def pre_update_lake(self, request: service.UpdateLakeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateLakeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_lake - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_update_lake(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_lake - - DEPRECATED. Please use the `post_update_lake_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_update_lake` interceptor runs - before the `post_update_lake_with_metadata` interceptor. - """ - return response - - def post_update_lake_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_lake - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_update_lake_with_metadata` - interceptor in new development instead of the `post_update_lake` interceptor. - When both interceptors are used, this `post_update_lake_with_metadata` interceptor runs after the - `post_update_lake` interceptor. The (possibly modified) response returned by - `post_update_lake` will be passed to - `post_update_lake_with_metadata`. - """ - return response, metadata - - def pre_update_task(self, request: service.UpdateTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_update_task(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_task - - DEPRECATED. Please use the `post_update_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_update_task` interceptor runs - before the `post_update_task_with_metadata` interceptor. - """ - return response - - def post_update_task_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_update_task_with_metadata` - interceptor in new development instead of the `post_update_task` interceptor. - When both interceptors are used, this `post_update_task_with_metadata` interceptor runs after the - `post_update_task` interceptor. 
The (possibly modified) response returned by - `post_update_task` will be passed to - `post_update_task_with_metadata`. - """ - return response, metadata - - def pre_update_zone(self, request: service.UpdateZoneRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateZoneRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_zone - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_update_zone(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_zone - - DEPRECATED. Please use the `post_update_zone_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. This `post_update_zone` interceptor runs - before the `post_update_zone_with_metadata` interceptor. - """ - return response - - def post_update_zone_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_zone - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataplexService server but before it is returned to user code. - - We recommend only using this `post_update_zone_with_metadata` - interceptor in new development instead of the `post_update_zone` interceptor. - When both interceptors are used, this `post_update_zone_with_metadata` interceptor runs after the - `post_update_zone` interceptor. The (possibly modified) response returned by - `post_update_zone` will be passed to - `post_update_zone_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. 
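Note that the mixin hooks that follow (get_location, list_locations, and the operations methods) take and return only the response; there is no `_with_metadata` variant for them. A sketch:

    from google.cloud.dataplex_v1.services.dataplex_service.transports.rest import (
        DataplexServiceRestInterceptor,
    )

    class LocationCounter(DataplexServiceRestInterceptor):
        def post_list_locations(self, response):
            # Mixin RPCs pass only the response through the interceptor.
            print(f"{len(response.locations)} locations visible")
            return response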
- """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataplexService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DataplexService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DataplexServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DataplexServiceRestInterceptor - - -class DataplexServiceRestTransport(_BaseDataplexServiceRestTransport): - """REST backend synchronous transport for DataplexService. - - Dataplex service provides data lakes as a service. 
The - primary resources offered by this service are Lakes, Zones and - Assets which collectively allow a data administrator to - organize, manage, secure and catalog data across their - organization located across cloud projects in a variety of - storage systems including Cloud Storage and BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DataplexServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DataplexServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CancelJob(_BaseDataplexServiceRestTransport._BaseCancelJob, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CancelJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CancelJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the cancel job method over HTTP. - - Args: - request (~.service.CancelJobRequest): - The request object. Cancel task jobs. 
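To activate any of the interceptors sketched above, construct the REST transport with the `interceptor=` argument accepted by the `__init__` shown here and hand the transport to the client. A sketch, with import paths as generated in this package; `AuditInterceptor` is the illustrative class from the first sketch:

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.dataplex_service.transports import (
        DataplexServiceRestTransport,
    )

    transport = DataplexServiceRestTransport(interceptor=AuditInterceptor())
    client = dataplex_v1.DataplexServiceClient(transport=transport)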
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCancelJob._get_http_options() - - request, metadata = self._interceptor.pre_cancel_job(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CancelJob", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CancelJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _CreateAsset(_BaseDataplexServiceRestTransport._BaseCreateAsset, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CreateAsset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateAssetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create asset method over HTTP. - - Args: - request (~.service.CreateAssetRequest): - The request object. Create asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
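The metadata rule documented above (values are `str`, except keys ending in `-bin`, which carry `bytes`) looks like this at any call site, e.g. `cancel_job`; the header names and resource path are hypothetical:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()
    client.cancel_job(
        name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task/jobs/my-job",
        metadata=(
            ("x-example-text", "plain string value"),      # hypothetical header
            ("x-example-trace-bin", b"\x0a\x02\x08\x01"),  # "-bin" suffix => bytes
        ),
    )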
- - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_http_options() - - request, metadata = self._interceptor.pre_create_asset(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCreateAsset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateAsset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateAsset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CreateAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_asset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_asset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_asset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateAsset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateEnvironment(_BaseDataplexServiceRestTransport._BaseCreateEnvironment, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CreateEnvironment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateEnvironmentRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create environment method over HTTP. - - Args: - request (~.service.CreateEnvironmentRequest): - The request object. Create environment request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_http_options() - - request, metadata = self._interceptor.pre_create_environment(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateEnvironment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateEnvironment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CreateEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
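The comment above marks the point where every stub turns a non-2xx HTTP response into a typed exception. A rough sketch of the mapping that core_exceptions.from_http_response performs, using a synthetic requests.Response (the JSON error body shown is illustrative):

import requests
from google.api_core import exceptions as core_exceptions

response = requests.Response()
response.status_code = 404
response._content = b'{"error": {"message": "lake not found", "status": "NOT_FOUND"}}'

# from_http_response returns the GoogleAPICallError subclass matching the
# status code; the transport raises it rather than returning it.
exc = core_exceptions.from_http_response(response)
print(type(exc).__name__)  # NotFound
print(exc.message)         # includes "lake not found"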
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_environment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_environment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_environment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateEnvironment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateLake(_BaseDataplexServiceRestTransport._BaseCreateLake, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CreateLake") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateLakeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create lake method over HTTP. - - Args: - request (~.service.CreateLakeRequest): - The request object. Create lake request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCreateLake._get_http_options() - - request, metadata = self._interceptor.pre_create_lake(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateLake._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCreateLake._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCreateLake._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateLake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateLake", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CreateLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_lake(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_lake_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_lake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateLake", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateTask(_BaseDataplexServiceRestTransport._BaseCreateTask, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CreateTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create task method 
over HTTP. - - Args: - request (~.service.CreateTaskRequest): - The request object. Create task request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCreateTask._get_http_options() - - request, metadata = self._interceptor.pre_create_task(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateTask._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCreateTask._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCreateTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateTask", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CreateTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_task", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateZone(_BaseDataplexServiceRestTransport._BaseCreateZone, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CreateZone") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateZoneRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create zone method over HTTP. - - Args: - request (~.service.CreateZoneRequest): - The request object. Create zone request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
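CreateZone, like the other mutating RPCs in this transport, returns a raw operations_pb2.Operation, and the cached operations_client property defined earlier is what polls it. A minimal polling sketch, assuming transport is a constructed DataplexServiceRestTransport and op_name is the name field of an Operation returned by one of these calls:

import time

ops_client = transport.operations_client  # created on first access, then cached

op = ops_client.get_operation(name=op_name)
while not op.done:
    time.sleep(5)  # naive fixed delay; real callers use operation futures
    op = ops_client.get_operation(name=op_name)

In everyday use the GAPIC client wraps this loop in an operation future, so client.create_zone(...).result() performs the equivalent polling with proper backoff.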
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseCreateZone._get_http_options() - - request, metadata = self._interceptor.pre_create_zone(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCreateZone._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCreateZone._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCreateZone._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CreateZone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateZone", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CreateZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_zone(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_zone_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.create_zone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CreateZone", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAsset(_BaseDataplexServiceRestTransport._BaseDeleteAsset, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteAsset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteAssetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete asset method over HTTP. 
- - Args: - request (~.service.DeleteAssetRequest): - The request object. Delete asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_http_options() - - request, metadata = self._interceptor.pre_delete_asset(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteAsset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteAsset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteAsset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_asset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_asset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteAsset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEnvironment(_BaseDataplexServiceRestTransport._BaseDeleteEnvironment, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteEnvironment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteEnvironmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete environment method over HTTP. - - Args: - request (~.service.DeleteEnvironmentRequest): - The request object. Delete environment request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_http_options() - - request, metadata = self._interceptor.pre_delete_environment(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteEnvironment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteEnvironment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_environment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_environment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteEnvironment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteLake(_BaseDataplexServiceRestTransport._BaseDeleteLake, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteLake") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteLakeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete lake method over HTTP. 
- - Args: - request (~.service.DeleteLakeRequest): - The request object. Delete lake request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_http_options() - - request, metadata = self._interceptor.pre_delete_lake(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteLake._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteLake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteLake", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_lake(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_lake_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteLake", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteTask(_BaseDataplexServiceRestTransport._BaseDeleteTask, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete task method over HTTP. - - Args: - request (~.service.DeleteTaskRequest): - The request object. Delete task request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_http_options() - - request, metadata = self._interceptor.pre_delete_task(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteTask", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_task", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteZone(_BaseDataplexServiceRestTransport._BaseDeleteZone, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteZone") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteZoneRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete zone method over HTTP. - - Args: - request (~.service.DeleteZoneRequest): - The request object. Delete zone request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_http_options() - - request, metadata = self._interceptor.pre_delete_zone(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteZone._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteZone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteZone", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_zone(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_zone_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteZone", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAsset(_BaseDataplexServiceRestTransport._BaseGetAsset, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetAsset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetAssetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> resources.Asset: - r"""Call the get asset method over HTTP. - - Args: - request (~.service.GetAssetRequest): - The request object. Get asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.resources.Asset: - An asset represents a cloud resource - that is being managed within a lake as a - member of a zone. 
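_GetAsset below is the first stub in this file that returns a proto-plus resource rather than a raw Operation; the parse step it uses (resources.Asset.pb(resp) plus json_format.Parse) is how the JSON body lands in the wrapper. A standalone sketch of that round trip, with a made-up payload:

from google.protobuf import json_format

from google.cloud.dataplex_v1.types import resources

payload = '{"name": "projects/p/locations/l/lakes/lake/zones/z/assets/a"}'  # illustrative body

resp = resources.Asset()             # proto-plus wrapper
pb_resp = resources.Asset.pb(resp)   # the underlying protobuf message it wraps
json_format.Parse(payload, pb_resp, ignore_unknown_fields=True)

print(resp.name)  # fields parsed into the pb are visible through the wrapper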
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetAsset._get_http_options() - - request, metadata = self._interceptor.pre_get_asset(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetAsset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetAsset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetAsset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetAsset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = resources.Asset() - pb_resp = resources.Asset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_asset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_asset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = resources.Asset.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_asset", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetAsset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetEnvironment(_BaseDataplexServiceRestTransport._BaseGetEnvironment, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetEnvironment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetEnvironmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> analyze.Environment: - r"""Call the get environment method over HTTP. - - Args: - request (~.service.GetEnvironmentRequest): - The request object. 
Get environment request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.analyze.Environment: - Environment represents a user-visible - compute infrastructure for analytics - within a lake. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_http_options() - - request, metadata = self._interceptor.pre_get_environment(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetEnvironment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetEnvironment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetEnvironment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = analyze.Environment() - pb_resp = analyze.Environment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_environment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_environment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = analyze.Environment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_environment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetEnvironment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetJob(_BaseDataplexServiceRestTransport._BaseGetJob, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> tasks.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.service.GetJobRequest): - The request object. Get job request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.tasks.Job: - A job represents an instance of a - task. 
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetJob._get_http_options() - - request, metadata = self._interceptor.pre_get_job(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetJob", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = tasks.Job() - pb_resp = tasks.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = tasks.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_job", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetLake(_BaseDataplexServiceRestTransport._BaseGetLake, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetLake") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetLakeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> resources.Lake: - r"""Call the get lake method over HTTP. - - Args: - request (~.service.GetLakeRequest): - The request object. Get lake request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.resources.Lake: - A lake is a centralized repository - for managing enterprise data across the - organization distributed across many - cloud projects, and stored in a variety - of storage services such as Google Cloud - Storage and BigQuery. The resources - attached to a lake are referred to as - managed resources. Data within these - managed resources can be structured or - unstructured. A lake provides data - admins with tools to organize, secure - and manage their data at scale, and - provides data scientists and data - engineers an integrated experience to - easily search, discover, analyze and - transform data and associated metadata. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetLake._get_http_options() - - request, metadata = self._interceptor.pre_get_lake(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetLake._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetLake._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetLake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetLake", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = resources.Lake() - pb_resp = resources.Lake.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_lake(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_lake_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = resources.Lake.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_lake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetLake", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTask(_BaseDataplexServiceRestTransport._BaseGetTask, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> tasks.Task: - r"""Call the get task method over HTTP. - - Args: - request (~.service.GetTaskRequest): - The request object. Get task request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.tasks.Task: - A task represents a user-visible job. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetTask._get_http_options() - - request, metadata = self._interceptor.pre_get_task(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetTask._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetTask", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = tasks.Task() - pb_resp = tasks.Task.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = tasks.Task.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_task", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetZone(_BaseDataplexServiceRestTransport._BaseGetZone, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetZone") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetZoneRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> resources.Zone: - r"""Call the get zone method over HTTP. - - Args: - request (~.service.GetZoneRequest): - The request object. Get zone request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.resources.Zone: - A zone represents a logical group of - related assets within a lake. A zone can - be used to map to organizational - structure or represent stages of data - readiness from raw to curated. It - provides managing behavior that is - shared or inherited by all contained - assets. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetZone._get_http_options() - - request, metadata = self._interceptor.pre_get_zone(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetZone._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetZone._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetZone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetZone", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = resources.Zone() - pb_resp = resources.Zone.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_zone(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_zone_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = resources.Zone.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.get_zone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetZone", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListAssetActions(_BaseDataplexServiceRestTransport._BaseListAssetActions, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListAssetActions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListAssetActionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListActionsResponse: - r"""Call the list asset actions method over HTTP. - - Args: - request (~.service.ListAssetActionsRequest): - The request object. List asset actions request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListActionsResponse: - List actions response. 
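The handlers above only assemble their httpRequest/httpResponse log records when CLIENT_LOGGING_SUPPORTED is true and the module logger is enabled for DEBUG. A sketch of switching that logging on with the standard library (the logger name is assumed from the package path; recent google-api-core releases can also gate this behavior through the GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable):

    import logging

    # Emit the transport's "Sending request" / "Received response" records.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)
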
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_http_options() - - request, metadata = self._interceptor.pre_list_asset_actions(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListAssetActions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListAssetActions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListAssetActions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListAssetActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListActionsResponse() - pb_resp = service.ListActionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_asset_actions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_asset_actions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListActionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListAssetActions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListAssets(_BaseDataplexServiceRestTransport._BaseListAssets, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListAssetsResponse: - r"""Call the list 
assets method over HTTP. - - Args: - request (~.service.ListAssetsRequest): - The request object. List assets request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListAssetsResponse: - List assets response. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListAssets._get_http_options() - - request, metadata = self._interceptor.pre_list_assets(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListAssets", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListAssets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListAssetsResponse() - pb_resp = service.ListAssetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_assets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListAssetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_assets", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListAssets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEnvironments(_BaseDataplexServiceRestTransport._BaseListEnvironments, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListEnvironments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListEnvironmentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListEnvironmentsResponse: - r"""Call the list environments method over HTTP. - - Args: - request (~.service.ListEnvironmentsRequest): - The request object. List environments request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListEnvironmentsResponse: - List environments response. 
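Each _List* handler returns a single page; iteration over next_page_token happens one layer up, in the pager returned by the public client. A sketch (resource names hypothetical):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    # The pager re-invokes _ListAssets until the service returns an empty
    # next_page_token.
    parent = ("projects/my-project/locations/us-central1/"
              "lakes/my-lake/zones/my-zone")
    for asset in client.list_assets(parent=parent):
        print(asset.name)
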
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_http_options() - - request, metadata = self._interceptor.pre_list_environments(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListEnvironments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListEnvironments", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListEnvironments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListEnvironments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListEnvironmentsResponse() - pb_resp = service.ListEnvironmentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_environments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_environments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListEnvironmentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_environments", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListEnvironments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListJobs(_BaseDataplexServiceRestTransport._BaseListJobs, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListJobsResponse: - r"""Call the list 
jobs method over HTTP. - - Args: - request (~.service.ListJobsRequest): - The request object. List jobs request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListJobsResponse: - List jobs response. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_jobs(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListJobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListJobsResponse() - pb_resp = service.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListLakeActions(_BaseDataplexServiceRestTransport._BaseListLakeActions, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListLakeActions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListLakeActionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListActionsResponse: - r"""Call the list lake actions method over HTTP. - - Args: - request (~.service.ListLakeActionsRequest): - The request object. List lake actions request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListActionsResponse: - List actions response. 
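On any status >= 400, these handlers raise through core_exceptions.from_http_response, which maps the HTTP status to a typed GoogleAPICallError subclass. Callers therefore catch exception types rather than inspecting status codes; a sketch (the lake name is hypothetical):

    from google.api_core import exceptions as core_exceptions
    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    try:
        client.get_lake(
            name="projects/my-project/locations/us-central1/lakes/missing")
    except core_exceptions.NotFound as exc:  # raised for an HTTP 404
        print(exc.message)
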
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_http_options() - - request, metadata = self._interceptor.pre_list_lake_actions(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListLakeActions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLakeActions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLakeActions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListLakeActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListActionsResponse() - pb_resp = service.ListActionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_lake_actions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_lake_actions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListActionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLakeActions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListLakes(_BaseDataplexServiceRestTransport._BaseListLakes, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListLakes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListLakesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListLakesResponse: - r"""Call the list lakes method over 
HTTP. - - Args: - request (~.service.ListLakesRequest): - The request object. List lakes request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListLakesResponse: - List lakes response. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListLakes._get_http_options() - - request, metadata = self._interceptor.pre_list_lakes(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListLakes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListLakes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLakes", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLakes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListLakes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListLakesResponse() - pb_resp = service.ListLakesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_lakes(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_lakes_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListLakesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLakes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListSessions(_BaseDataplexServiceRestTransport._BaseListSessions, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListSessions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListSessionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListSessionsResponse: - r"""Call the list sessions method over HTTP. - - Args: - request (~.service.ListSessionsRequest): - The request object. List sessions request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListSessionsResponse: - List sessions response. 
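Every handler brackets the request with self._interceptor.pre_*/post_* hooks, so behavior can be customized without touching this generated code. A sketch of a custom interceptor (the import path is assumed from the usual GAPIC module layout):

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.dataplex_service.transports.rest import (
        DataplexServiceRestInterceptor,
        DataplexServiceRestTransport,
    )

    class CountingInterceptor(DataplexServiceRestInterceptor):
        def post_list_lakes(self, response):
            # Runs inside _ListLakes.__call__ right after the page is parsed.
            print(f"page contained {len(response.lakes)} lakes")
            return response

    transport = DataplexServiceRestTransport(interceptor=CountingInterceptor())
    client = dataplex_v1.DataplexServiceClient(transport=transport)
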
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseListSessions._get_http_options() - - request, metadata = self._interceptor.pre_list_sessions(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListSessions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListSessions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListSessions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListSessions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListSessionsResponse() - pb_resp = service.ListSessionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_sessions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListSessionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListSessions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTasks(_BaseDataplexServiceRestTransport._BaseListTasks, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListTasks") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListTasksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListTasksResponse: - r"""Call the list tasks method over HTTP. 
- - Args: - request (~.service.ListTasksRequest): - The request object. List tasks request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListTasksResponse: - List tasks response. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListTasks._get_http_options() - - request, metadata = self._interceptor.pre_list_tasks(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListTasks._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListTasks._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListTasks", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListTasks", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListTasks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListTasksResponse() - pb_resp = service.ListTasksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_tasks(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_tasks_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListTasksResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListTasks", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListZoneActions(_BaseDataplexServiceRestTransport._BaseListZoneActions, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListZoneActions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListZoneActionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListActionsResponse: - r"""Call the list zone actions method over HTTP. - - Args: - request (~.service.ListZoneActionsRequest): - The request object. List zone actions request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListActionsResponse: - List actions response. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_http_options() - - request, metadata = self._interceptor.pre_list_zone_actions(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListZoneActions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListZoneActions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListZoneActions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListZoneActions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListActionsResponse() - pb_resp = service.ListActionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_zone_actions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_zone_actions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListActionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListZoneActions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListZones(_BaseDataplexServiceRestTransport._BaseListZones, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListZones") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListZonesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListZonesResponse: - r"""Call the list zones method over 
HTTP. - - Args: - request (~.service.ListZonesRequest): - The request object. List zones request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListZonesResponse: - List zones response. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListZones._get_http_options() - - request, metadata = self._interceptor.pre_list_zones(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListZones._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListZones._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListZones", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListZones", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListZones._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListZonesResponse() - pb_resp = service.ListZonesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_zones(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_zones_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListZonesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.list_zones", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListZones", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RunTask(_BaseDataplexServiceRestTransport._BaseRunTask, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.RunTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.RunTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.RunTaskResponse: - r"""Call the run task method over HTTP. - - Args: - request (~.service.RunTaskRequest): - The request object. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.service.RunTaskResponse: - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseRunTask._get_http_options() - - request, metadata = self._interceptor.pre_run_task(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseRunTask._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseRunTask._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseRunTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.RunTask", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "RunTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._RunTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.RunTaskResponse() - pb_resp = service.RunTaskResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_run_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_run_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.RunTaskResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.run_task", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "RunTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateAsset(_BaseDataplexServiceRestTransport._BaseUpdateAsset, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.UpdateAsset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateAssetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
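_RunTask is the first handler in this hunk that sends a JSON request body (note the data=body argument in its _get_response). From the client it is a one-shot execution of an existing task; a sketch (the task name is hypothetical):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    # POST with a JSON body, served by _RunTask._get_response(..., body).
    response = client.run_task(
        name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task")
    print(response.job.name)
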
-    class _UpdateAsset(_BaseDataplexServiceRestTransport._BaseUpdateAsset, DataplexServiceRestStub):
-        def __hash__(self):
-            return hash("DataplexServiceRestTransport.UpdateAsset")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-                )
-            return response
-
-        def __call__(self,
-                request: service.UpdateAssetRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the update asset method over HTTP.
-
-            Args:
-                request (~.service.UpdateAssetRequest):
-                    The request object. Update asset request.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                    long-running operation that is the
-                    result of a network API call.
-
-            """
-
-            http_options = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_http_options()
-
-            request, metadata = self._interceptor.pre_update_asset(request, metadata)
-            transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_transcoded_request(http_options, request)
-
-            body = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseDataplexServiceRestTransport._BaseUpdateAsset._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateAsset",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.DataplexService",
-                        "rpcName": "UpdateAsset",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = DataplexServiceRestTransport._UpdateAsset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = operations_pb2.Operation()
-            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_update_asset(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_update_asset_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_asset",
-                    extra = {
-                        "serviceName": "google.cloud.dataplex.v1.DataplexService",
-                        "rpcName": "UpdateAsset",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
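Unlike the Get*/List* handlers, _UpdateAsset returns a raw operations_pb2.Operation; the client layer wraps it in a google.api_core.operation.Operation future that polls until the long-running operation completes. A sketch against the analogous update_lake method (resource names hypothetical):

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient(transport="rest")
    lake = dataplex_v1.Lake(
        name="projects/my-project/locations/us-central1/lakes/my-lake",
        display_name="My Lake")
    op = client.update_lake(lake=lake, update_mask={"paths": ["display_name"]})
    updated = op.result(timeout=300)  # blocks until the LRO finishes
    print(updated.display_name)
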
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_http_options() - - request, metadata = self._interceptor.pre_update_environment(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateEnvironment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateEnvironment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._UpdateEnvironment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_environment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_environment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_environment", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateEnvironment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateLake(_BaseDataplexServiceRestTransport._BaseUpdateLake, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.UpdateLake") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateLakeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update lake method over HTTP. - - Args: - request (~.service.UpdateLakeRequest): - The request object. Update lake request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_http_options() - - request, metadata = self._interceptor.pre_update_lake(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseUpdateLake._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateLake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateLake", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._UpdateLake._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_lake(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_lake_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_lake", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateLake", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateTask(_BaseDataplexServiceRestTransport._BaseUpdateTask, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.UpdateTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update task method over HTTP. - - Args: - request (~.service.UpdateTaskRequest): - The request object. Update task request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_http_options() - - request, metadata = self._interceptor.pre_update_task(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseUpdateTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateTask", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._UpdateTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_task", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateZone(_BaseDataplexServiceRestTransport._BaseUpdateZone, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.UpdateZone") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateZoneRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update zone method 
over HTTP. - - Args: - request (~.service.UpdateZoneRequest): - The request object. Update zone request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_http_options() - - request, metadata = self._interceptor.pre_update_zone(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseUpdateZone._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.UpdateZone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateZone", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._UpdateZone._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_zone(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_zone_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceClient.update_zone", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "UpdateZone", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def cancel_job(self) -> Callable[ - [service.CancelJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_asset(self) -> Callable[ - [service.CreateAssetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAsset(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_environment(self) -> Callable[ - [service.CreateEnvironmentRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEnvironment(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_lake(self) -> Callable[ - [service.CreateLakeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateLake(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_task(self) -> Callable[ - [service.CreateTaskRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_zone(self) -> Callable[ - [service.CreateZoneRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateZone(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_asset(self) -> Callable[ - [service.DeleteAssetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAsset(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_environment(self) -> Callable[ - [service.DeleteEnvironmentRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteEnvironment(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_lake(self) -> Callable[ - [service.DeleteLakeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteLake(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_task(self) -> Callable[ - [service.DeleteTaskRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_zone(self) -> Callable[ - [service.DeleteZoneRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteZone(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_asset(self) -> Callable[ - [service.GetAssetRequest], - resources.Asset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAsset(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_environment(self) -> Callable[ - [service.GetEnvironmentRequest], - analyze.Environment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEnvironment(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [service.GetJobRequest], - tasks.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_lake(self) -> Callable[ - [service.GetLakeRequest], - resources.Lake]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetLake(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_task(self) -> Callable[ - [service.GetTaskRequest], - tasks.Task]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_zone(self) -> Callable[ - [service.GetZoneRequest], - resources.Zone]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetZone(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_asset_actions(self) -> Callable[ - [service.ListAssetActionsRequest], - service.ListActionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAssetActions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_assets(self) -> Callable[ - [service.ListAssetsRequest], - service.ListAssetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_environments(self) -> Callable[ - [service.ListEnvironmentsRequest], - service.ListEnvironmentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListEnvironments(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [service.ListJobsRequest], - service.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_lake_actions(self) -> Callable[ - [service.ListLakeActionsRequest], - service.ListActionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLakeActions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_lakes(self) -> Callable[ - [service.ListLakesRequest], - service.ListLakesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLakes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_sessions(self) -> Callable[ - [service.ListSessionsRequest], - service.ListSessionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_tasks(self) -> Callable[ - [service.ListTasksRequest], - service.ListTasksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_zone_actions(self) -> Callable[ - [service.ListZoneActionsRequest], - service.ListActionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListZoneActions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_zones(self) -> Callable[ - [service.ListZonesRequest], - service.ListZonesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListZones(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_task(self) -> Callable[ - [service.RunTaskRequest], - service.RunTaskResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_asset(self) -> Callable[ - [service.UpdateAssetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAsset(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_environment(self) -> Callable[ - [service.UpdateEnvironmentRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateEnvironment(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_lake(self) -> Callable[ - [service.UpdateLakeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateLake(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_task(self) -> Callable[ - [service.UpdateTaskRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_zone(self) -> Callable[ - [service.UpdateZoneRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateZone(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseDataplexServiceRestTransport._BaseGetLocation, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseDataplexServiceRestTransport._BaseListLocations, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseDataplexServiceRestTransport._BaseCancelOperation, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseDataplexServiceRestTransport._BaseDeleteOperation, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseDataplexServiceRestTransport._BaseGetOperation, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseDataplexServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseDataplexServiceRestTransport._BaseListOperations, DataplexServiceRestStub): - def __hash__(self): - return hash("DataplexServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseDataplexServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseDataplexServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataplexServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.DataplexServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataplexServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.DataplexServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.DataplexService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DataplexServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py deleted file mode 100644 index e0aa68e5b753..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py +++ /dev/null @@ -1,1612 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import DataplexServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseDataplexServiceRestTransport(DataplexServiceTransport): - """Base REST backend transport for DataplexService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCancelJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CancelJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCancelJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateAsset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "assetId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k 
not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets', - 'body': 'asset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateAssetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCreateAsset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateEnvironment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "environmentId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/environments', - 'body': 'environment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateEnvironmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCreateEnvironment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateLake: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "lakeId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/lakes', - 'body': 'lake', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateLakeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - 
@staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCreateLake._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "taskId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/tasks', - 'body': 'task', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCreateTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateZone: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "zoneId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/zones', - 'body': 'zone', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateZoneRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseCreateZone._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteAsset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteAssetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteAsset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEnvironment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/environments/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteEnvironmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteEnvironment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteLake: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteLakeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteLake._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteZone: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteZoneRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseDeleteZone._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAsset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetAssetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetAsset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEnvironment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - 
def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/environments/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetEnvironmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetEnvironment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLake: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetLakeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetLake._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, 
str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/tasks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetZone: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetZoneRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseGetZone._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAssetActions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/assets/*}/actions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListAssetActionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListAssetActions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAssets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListEnvironments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/environments', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListEnvironmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListEnvironments._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/tasks/*}/jobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListJobs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListLakeActions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', 
- 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/actions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListLakeActionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListLakeActions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListLakes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/lakes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListLakesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListLakes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListSessions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/environments/*}/sessions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListSessionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListSessions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTasks: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': 
'/v1/{parent=projects/*/locations/*/lakes/*}/tasks', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListTasksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListTasks._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListZoneActions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/actions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListZoneActionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListZoneActions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListZones: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*}/zones', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListZonesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseListZones._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRunTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/locations/*/lakes/*/tasks/*}:run', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.RunTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseRunTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateAsset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{asset.name=projects/*/locations/*/lakes/*/zones/*/assets/*}', - 'body': 'asset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateAssetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateAsset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEnvironment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{environment.name=projects/*/locations/*/lakes/*/environments/*}', - 'body': 'environment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateEnvironmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( 
- transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateEnvironment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateLake: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{lake.name=projects/*/locations/*/lakes/*}', - 'body': 'lake', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateLakeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateLake._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{task.name=projects/*/locations/*/lakes/*/tasks/*}', - 'body': 'task', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateZone: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: 
List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{zone.name=projects/*/locations/*/lakes/*/zones/*}', - 'body': 'zone', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateZoneRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataplexServiceRestTransport._BaseUpdateZone._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
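The locations and operations mixin classes here pass a plain dict of request fields to ``path_template.transcode`` rather than a protobuf message. A minimal sketch of what transcoding yields for the ``ListOperations`` rule shown above, with a made-up resource name and a page-size field that spills into the query string:

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        'method': 'get',
        'uri': '/v1/{name=projects/*/locations/*}/operations',
    }]

    # Fields consumed by the URI template are spliced into the path; the
    # remainder is returned as query parameters.
    transcoded = path_template.transcode(
        http_options,
        name='projects/my-project/locations/us-central1',
        pageSize=10,
    )

    print(transcoded['method'])        # get
    print(transcoded['uri'])           # /v1/projects/my-project/locations/us-central1/operations
    print(transcoded['query_params'])  # {'pageSize': 10}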
@staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDataplexServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py deleted file mode 100644 index 6d73da7177b0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import MetadataServiceClient -from .async_client import MetadataServiceAsyncClient - -__all__ = ( - 'MetadataServiceClient', - 'MetadataServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py deleted file mode 100644 index a764830d7bc8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ /dev/null @@ -1,1571 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport -from .client import MetadataServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class MetadataServiceAsyncClient: - """Metadata service manages metadata resources such as tables, - filesets and partitions. - """ - - _client: MetadataServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = MetadataServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MetadataServiceClient._DEFAULT_UNIVERSE - - entity_path = staticmethod(MetadataServiceClient.entity_path) - parse_entity_path = staticmethod(MetadataServiceClient.parse_entity_path) - partition_path = staticmethod(MetadataServiceClient.partition_path) - parse_partition_path = staticmethod(MetadataServiceClient.parse_partition_path) - zone_path = staticmethod(MetadataServiceClient.zone_path) - parse_zone_path = staticmethod(MetadataServiceClient.parse_zone_path) - common_billing_account_path = staticmethod(MetadataServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetadataServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MetadataServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MetadataServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MetadataServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MetadataServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MetadataServiceClient.common_project_path) - parse_common_project_path = staticmethod(MetadataServiceClient.parse_common_project_path) - common_location_path = staticmethod(MetadataServiceClient.common_location_path) - parse_common_location_path = staticmethod(MetadataServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceAsyncClient: The constructed client. - """ - return MetadataServiceClient.from_service_account_info.__func__(MetadataServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceAsyncClient: The constructed client. - """ - return MetadataServiceClient.from_service_account_file.__func__(MetadataServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MetadataServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MetadataServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetadataServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MetadataServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metadata service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetadataServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MetadataServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.MetadataServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "credentialsType": None, - } - ) - - async def create_entity(self, - request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, - *, - parent: Optional[str] = None, - entity: Optional[metadata_.Entity] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Create a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = await client.create_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]]): - The request object. Create a metadata entity request. 
- parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity (:class:`google.cloud.dataplex_v1.types.Entity`): - Required. Entity resource. - This corresponds to the ``entity`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entity] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreateEntityRequest): - request = metadata_.CreateEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entity is not None: - request.entity = entity - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entity(self, - request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Update a metadata entity. Only supports full resource - update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
# - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = await client.update_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]]): - The request object. Update a metadata entity request. - The existing entity will be fully - replaced by the entity in the request. - The entity ID is mutable. To modify the - ID, use the current entity ID in the - request URL and specify the new ID in - the request body. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.UpdateEntityRequest): - request = metadata_.UpdateEntityRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entity.name", request.entity.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entity(self, - request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - await client.delete_entity(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]]): - The request object. Delete a metadata entity request. - name (:class:`str`): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeleteEntityRequest): - request = metadata_.DeleteEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_entity(self, - request: Optional[Union[metadata_.GetEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Get a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
# - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntityRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]]): - The request object. Get metadata entity request. - name (:class:`str`): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetEntityRequest): - request = metadata_.GetEntityRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_entities(self, - request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntitiesAsyncPager: - r"""List metadata entities in a zone. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]]): - The request object. List metadata entities request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager: - List metadata entities response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListEntitiesRequest): - request = metadata_.ListEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
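A short editorial sketch (not part of the generated file): besides yielding items through ``__aiter__``, the pager built just below can also be consumed one ``ListEntitiesResponse`` page at a time through its ``pages`` property; the parent value here is a placeholder.

.. code-block:: python

    from google.cloud import dataplex_v1

    async def sample_list_entities_by_page():
        client = dataplex_v1.MetadataServiceAsyncClient()
        request = dataplex_v1.ListEntitiesRequest(
            parent="parent_value",
            view="FILESETS",
        )
        pager = await client.list_entities(request=request)
        # `pages` lazily resolves additional pages, one RPC per page.
        async for page in pager.pages:
            for entity in page.entities:
                print(entity.name)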
- response = pagers.ListEntitiesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_partition(self, - request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - partition: Optional[metadata_.Partition] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Partition: - r"""Create a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = await client.create_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]]): - The request object. Create metadata partition request. - parent (:class:`str`): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - partition (:class:`google.cloud.dataplex_v1.types.Partition`): - Required. Partition resource. - This corresponds to the ``partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, partition] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, metadata_.CreatePartitionRequest): - request = metadata_.CreatePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if partition is not None: - request.partition = partition - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_partition(self, - request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_partition(request=request) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]]): - The request object. Delete metadata partition request. - name (:class:`str`): - Required. The resource name of the partition. format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
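An editorial sketch (not generated code) of the mutual-exclusion check that the comment above describes and the lines below implement: callers pass either a ``request`` object or the flattened ``name`` argument, never both. The resource name is a placeholder.

.. code-block:: python

    from google.cloud import dataplex_v1

    async def sample_delete_partition_variants():
        client = dataplex_v1.MetadataServiceAsyncClient()

        # Either form is fine on its own.
        await client.delete_partition(name="name_value")
        await client.delete_partition(
            request=dataplex_v1.DeletePartitionRequest(name="name_value")
        )

        # Setting both triggers the ValueError raised below.
        try:
            await client.delete_partition(
                request=dataplex_v1.DeletePartitionRequest(name="name_value"),
                name="name_value",
            )
        except ValueError as exc:
            print(exc)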
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeletePartitionRequest): - request = metadata_.DeletePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_partition(self, - request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Partition: - r"""Get a metadata partition of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]]): - The request object. Get metadata partition request. - name (:class:`str`): - Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetPartitionRequest): - request = metadata_.GetPartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_partitions(self, - request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListPartitionsAsyncPager: - r"""List metadata partitions of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - async def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]]): - The request object. List metadata partitions request. - parent (:class:`str`): - Required. The resource name of the parent entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager: - List metadata partitions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListPartitionsRequest): - request = metadata_.ListPartitionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPartitionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
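An editorial sketch (with placeholder values) of the coercion the comment above refers to: ``operations_pb2`` messages are plain protobuf types rather than proto-plus wrappers, so a ``dict`` request is expanded into constructor keyword arguments.

.. code-block:: python

    from google.longrunning import operations_pb2

    request_dict = {"name": "projects/my-project/locations/us-central1", "page_size": 10}
    request = operations_pb2.ListOperationsRequest(**request_dict)
    assert request.page_size == 10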
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "MetadataServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "MetadataServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py deleted file mode 100644 index 0acd72595f44..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/client.py +++ /dev/null @@ -1,1953 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.dataplex_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetadataServiceGrpcTransport -from .transports.grpc_asyncio import MetadataServiceGrpcAsyncIOTransport -from .transports.rest import MetadataServiceRestTransport - - 
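A minimal editorial sketch (not part of the generated file): because the async client above defines ``__aenter__``/``__aexit__``, it can be used as an async context manager, which closes the underlying transport on exit. The entity name is a placeholder.

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1

    async def main():
        async with dataplex_v1.MetadataServiceAsyncClient() as client:
            entity = await client.get_entity(name="name_value")
            print(entity.name)

    asyncio.run(main())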
-class MetadataServiceClientMeta(type): - """Metaclass for the MetadataService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] - _transport_registry["grpc"] = MetadataServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MetadataServiceGrpcAsyncIOTransport - _transport_registry["rest"] = MetadataServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetadataServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MetadataServiceClient(metaclass=MetadataServiceClientMeta): - """Metadata service manages metadata resources such as tables, - filesets and partitions. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "dataplex.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetadataServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetadataServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetadataServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def entity_path(project: str,location: str,lake: str,zone: str,entity: str,) -> str: - """Returns a fully-qualified entity string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - - @staticmethod - def parse_entity_path(path: str) -> Dict[str,str]: - """Parses a entity path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def partition_path(project: str,location: str,lake: str,zone: str,entity: str,partition: str,) -> str: - """Returns a fully-qualified partition string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) - - @staticmethod - def parse_partition_path(path: str) -> Dict[str,str]: - """Parses a partition path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)/entities/(?P.+?)/partitions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def zone_path(project: str,location: str,lake: str,zone: str,) -> str: - """Returns a fully-qualified zone string.""" - return "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - - @staticmethod - def parse_zone_path(path: str) -> Dict[str,str]: - """Parses a zone path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/zones/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return 
m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use.
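An editorial sketch of the endpoint decision made here (and in the ``_get_api_endpoint`` helper defined further below); it calls a private helper purely for illustration, with the endpoint values following from ``DEFAULT_ENDPOINT`` and ``DEFAULT_MTLS_ENDPOINT`` above.

.. code-block:: python

    from google.cloud import dataplex_v1

    cls = dataplex_v1.MetadataServiceClient
    # "auto" without a client certificate resolves to the regular endpoint.
    assert cls._get_api_endpoint(None, None, "googleapis.com", "auto") == "dataplex.googleapis.com"
    # "always" forces the mTLS endpoint in the default universe.
    assert cls._get_api_endpoint(None, None, "googleapis.com", "always") == "dataplex.mtls.googleapis.com"
    # An explicit api_endpoint override always wins.
    assert cls._get_api_endpoint("example.com:443", None, "googleapis.com", "auto") == "example.com:443"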
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. 
- - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = MetadataServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetadataServiceTransport, Callable[..., MetadataServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metadata service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetadataServiceTransport,Callable[..., MetadataServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetadataServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. 
The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. If this property is not set and - ``transport`` is not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint), and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence, and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetadataServiceClient._read_environment_variables() - self._client_cert_source = MetadataServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MetadataServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, MetadataServiceTransport) - if transport_provided: - # transport is a MetadataServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(MetadataServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MetadataServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MetadataServiceTransport], Callable[..., MetadataServiceTransport]] = ( - MetadataServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MetadataServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.dataplex_v1.MetadataServiceClient`.", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "credentialsType": None, - } - ) - - def create_entity(self, - request: Optional[Union[metadata_.CreateEntityRequest, dict]] = None, - *, - parent: Optional[str] = None, - entity: Optional[metadata_.Entity] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Create a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.CreateEntityRequest( - parent="parent_value", - entity=entity, - ) - - # Make the request - response = client.create_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): - The request object. Create a metadata entity request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity (google.cloud.dataplex_v1.types.Entity): - Required. Entity resource. - This corresponds to the ``entity`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, entity] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreateEntityRequest): - request = metadata_.CreateEntityRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entity is not None: - request.entity = entity - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
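One detail of the flattened-calling convention above worth calling out: each RPC accepts either a fully built request object or the individual keyword fields, never both at once. A minimal sketch (the resource paths are placeholders, and application default credentials are assumed):

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.MetadataServiceClient()
    parent = "projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone"
    entity = dataplex_v1.Entity(id="my_entity")

    # Either style is accepted on its own:
    #   client.create_entity(parent=parent, entity=entity)
    #   client.create_entity(request=dataplex_v1.CreateEntityRequest(parent=parent, entity=entity))

    # Mixing a request object with flattened fields trips the guard:
    try:
        client.create_entity(
            request=dataplex_v1.CreateEntityRequest(parent=parent),
            entity=entity,
        )
    except ValueError as exc:
        print(exc)  # "If the `request` argument is set, then none of ..."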
- return response - - def update_entity(self, - request: Optional[Union[metadata_.UpdateEntityRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Update a metadata entity. Only supports full resource - update. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = client.update_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]): - The request object. Update a metadata entity request. - The existing entity will be fully - replaced by the entity in the request. - The entity ID is mutable. To modify the - ID, use the current entity ID in the - request URL and specify the new ID in - the request body. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.UpdateEntityRequest): - request = metadata_.UpdateEntityRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entity.name", request.entity.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
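Because ``update_entity`` is a full-resource replacement rather than a field-mask patch, the usual pattern is read-modify-write: fetch the current entity, change what you need, and send the whole resource back. A sketch under the same placeholder names as the previous example (``EntityView.FULL`` is assumed here so the replacement does not silently drop schema fields):

.. code-block:: python

    name = f"{parent}/entities/my_entity"

    # Fetch the complete resource, including schema, before replacing it.
    entity = client.get_entity(
        request=dataplex_v1.GetEntityRequest(
            name=name,
            view=dataplex_v1.GetEntityRequest.EntityView.FULL,
        )
    )

    entity.description = "refreshed description"
    updated = client.update_entity(request=dataplex_v1.UpdateEntityRequest(entity=entity))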
- return response - - def delete_entity(self, - request: Optional[Union[metadata_.DeleteEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEntityRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - client.delete_entity(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]): - The request object. Delete a metadata entity request. - name (str): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeleteEntityRequest): - request = metadata_.DeleteEntityRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
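Deletes on this surface use optimistic concurrency: as the generated sample above hints, ``DeleteEntityRequest`` carries the entity's current ``etag``, so the usual flow is fetch-then-delete. A sketch, reusing ``client`` and ``name`` from the earlier examples:

.. code-block:: python

    entity = client.get_entity(request=dataplex_v1.GetEntityRequest(name=name))

    # Passing the freshly read etag makes the delete fail if the
    # entity changed in the meantime.
    client.delete_entity(
        request=dataplex_v1.DeleteEntityRequest(name=entity.name, etag=entity.etag)
    )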
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_entity(self, - request: Optional[Union[metadata_.GetEntityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Entity: - r"""Get a metadata entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEntityRequest( - name="name_value", - ) - - # Make the request - response = client.get_entity(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetEntityRequest, dict]): - The request object. Get metadata entity request. - name (str): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Entity: - Represents tables and fileset - metadata contained within a zone. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetEntityRequest): - request = metadata_.GetEntityRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entity] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request.
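The ``routing_header`` lines above are what let the backend route a request by resource: selected request fields are serialized into the ``x-goog-request-params`` gRPC metadata entry. A small illustration of what that helper produces (the value shown is illustrative, not load-bearing):

.. code-block:: python

    from google.api_core import gapic_v1

    md = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/locations/us-central1"),)
    )
    print(md)
    # ('x-goog-request-params', 'name=projects%2Fmy-project%2Flocations%2Fus-central1')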
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entities(self, - request: Optional[Union[metadata_.ListEntitiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListEntitiesPager: - r"""List metadata entities in a zone. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListEntitiesRequest, dict]): - The request object. List metadata entities request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager: - List metadata entities response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListEntitiesRequest): - request = metadata_.ListEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntitiesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_partition(self, - request: Optional[Union[metadata_.CreatePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - partition: Optional[metadata_.Partition] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Partition: - r"""Create a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_create_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - partition = dataplex_v1.Partition() - partition.values = ['values_value1', 'values_value2'] - partition.location = "location_value" - - request = dataplex_v1.CreatePartitionRequest( - parent="parent_value", - partition=partition, - ) - - # Make the request - response = client.create_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]): - The request object. Create metadata partition request. - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - partition (google.cloud.dataplex_v1.types.Partition): - Required. Partition resource. - This corresponds to the ``partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, partition] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.CreatePartitionRequest): - request = metadata_.CreatePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if partition is not None: - request.partition = partition - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_partition(self, - request: Optional[Union[metadata_.DeletePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Delete a metadata partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_delete_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeletePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_partition(request=request) - - Args: - request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]): - The request object. Delete metadata partition request. - name (str): - Required. The resource name of the partition, in the format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.DeletePartitionRequest): - request = metadata_.DeletePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_partition(self, - request: Optional[Union[metadata_.GetPartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metadata_.Partition: - r"""Get a metadata partition of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.GetPartitionRequest, dict]): - The request object. Get metadata partition request. - name (str): - Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an - ordered sequence of partition values separated by "/". - All values must be provided. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.types.Partition: - Represents partition metadata - contained within entity instances. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.GetPartitionRequest): - request = metadata_.GetPartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_partitions(self, - request: Optional[Union[metadata_.ListPartitionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListPartitionsPager: - r"""List metadata partitions of an entity. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataplex_v1 - - def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataplex_v1.types.ListPartitionsRequest, dict]): - The request object. List metadata partitions request. - parent (str): - Required. The resource name of the parent entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager: - List metadata partitions response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metadata_.ListPartitionsRequest): - request = metadata_.ListPartitionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListPartitionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MetadataServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
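Stepping back to the context-manager support defined just above: ``__exit__`` closes the underlying transport, which suits short-lived scripts where the client is not shared. A sketch (placeholder resource name, application default credentials assumed):

.. code-block:: python

    with dataplex_v1.MetadataServiceClient() as client:
        entity = client.get_entity(
            request=dataplex_v1.GetEntityRequest(
                name="projects/my-project/locations/us-central1/lakes/my-lake/zones/my-zone/entities/my_entity"
            )
        )
    # The transport is closed here; further RPCs on `client` would fail.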
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
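As the coercion comments above note, the operations mixins take plain protobuf requests rather than proto-plus types, so a dict argument is expanded into the request constructor via keyword expansion. Both calls below are equivalent (the operation name is a placeholder):

.. code-block:: python

    from google.longrunning import operations_pb2

    op_name = "projects/my-project/locations/us-central1/operations/operation-123"

    op = client.get_operation(request=operations_pb2.GetOperationRequest(name=op_name))
    op = client.get_operation(request={"name": op_name})  # becomes GetOperationRequest(**request)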
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "MetadataServiceClient", -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py deleted file mode 100644 index 33f41499e016..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/pagers.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ - - -class ListEntitiesPager: - """A pager for iterating through ``list_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entities`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntities`` requests and continue to iterate - through the ``entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., metadata_.ListEntitiesResponse], - request: metadata_.ListEntitiesRequest, - response: metadata_.ListEntitiesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntitiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntitiesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metadata_.ListEntitiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metadata_.ListEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metadata_.Entity]: - for page in self.pages: - yield from page.entities - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntitiesAsyncPager: - """A pager for iterating through ``list_entities`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entities`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntities`` requests and continue to iterate - through the ``entities`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListEntitiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metadata_.ListEntitiesResponse]], - request: metadata_.ListEntitiesRequest, - response: metadata_.ListEntitiesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListEntitiesRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListEntitiesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metadata_.ListEntitiesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metadata_.ListEntitiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metadata_.Entity]: - async def async_generator(): - async for page in self.pages: - for response in page.entities: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPartitionsPager: - """A pager for iterating through ``list_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListPartitions`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metadata_.ListPartitionsResponse], - request: metadata_.ListPartitionsRequest, - response: metadata_.ListPartitionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListPartitionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListPartitionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metadata_.ListPartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metadata_.ListPartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metadata_.Partition]: - for page in self.pages: - yield from page.partitions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPartitionsAsyncPager: - """A pager for iterating through ``list_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListPartitions`` requests and continue to iterate - through the ``partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataplex_v1.types.ListPartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metadata_.ListPartitionsResponse]], - request: metadata_.ListPartitionsRequest, - response: metadata_.ListPartitionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataplex_v1.types.ListPartitionsRequest): - The initial request object. - response (google.cloud.dataplex_v1.types.ListPartitionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metadata_.ListPartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metadata_.ListPartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metadata_.Partition]: - async def async_generator(): - async for page in self.pages: - for response in page.partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst deleted file mode 100644 index ff25cadba5cb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MetadataServiceTransport` is the ABC for all transports. -- public child `MetadataServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MetadataServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMetadataServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MetadataServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py deleted file mode 100644 index 373a4faff810..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetadataServiceTransport -from .grpc import MetadataServiceGrpcTransport -from .grpc_asyncio import MetadataServiceGrpcAsyncIOTransport -from .rest import MetadataServiceRestTransport -from .rest import MetadataServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[MetadataServiceTransport]] -_transport_registry['grpc'] = MetadataServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MetadataServiceGrpcAsyncIOTransport -_transport_registry['rest'] = MetadataServiceRestTransport - -__all__ = ( - 'MetadataServiceTransport', - 'MetadataServiceGrpcTransport', - 'MetadataServiceGrpcAsyncIOTransport', - 'MetadataServiceRestTransport', - 'MetadataServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py deleted file mode 100644 index b184c3faf9f1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/base.py +++ /dev/null @@ -1,394 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class MetadataServiceTransport(abc.ABC): - """Abstract transport class for MetadataService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataplex.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file is passed by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
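# Each entry in the dictionary that follows pairs an RPC with its default
# retry/timeout policy; ``gapic_v1.method.wrap_method`` bakes those defaults
# into the returned callable. A hedged sketch of the effect (values
# illustrative, taken from the entries below):
#
#     wrapped = gapic_v1.method.wrap_method(
#         self.get_entity,
#         default_retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=1.3),
#         default_timeout=60.0,
#         client_info=client_info,
#     )
#     # wrapped(request) then retries transient errors and enforces the timeout.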
- self._wrapped_methods = { - self.create_entity: gapic_v1.method.wrap_method( - self.create_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entity: gapic_v1.method.wrap_method( - self.update_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entity: gapic_v1.method.wrap_method( - self.delete_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.get_entity: gapic_v1.method.wrap_method( - self.get_entity, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_entities: gapic_v1.method.wrap_method( - self.list_entities, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_partition: gapic_v1.method.wrap_method( - self.create_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_partition: gapic_v1.method.wrap_method( - self.delete_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.get_partition: gapic_v1.method.wrap_method( - self.get_partition, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_partitions: gapic_v1.method.wrap_method( - self.list_partitions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - Union[ - metadata_.Entity, - Awaitable[metadata_.Entity] - ]]: - raise NotImplementedError() - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - Union[ - metadata_.ListEntitiesResponse, - Awaitable[metadata_.ListEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - Union[ - metadata_.Partition, - Awaitable[metadata_.Partition] - ]]: - raise NotImplementedError() - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - Union[ - metadata_.Partition, - Awaitable[metadata_.Partition] - ]]: - raise NotImplementedError() - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - Union[ - metadata_.ListPartitionsResponse, - Awaitable[metadata_.ListPartitionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MetadataServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py deleted file mode 100644 index f1c6337633c9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py +++ /dev/null @@ -1,669 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata to a dict of str for logging. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata":
metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class MetadataServiceGrpcTransport(MetadataServiceTransport): - """gRPC backend transport for MetadataService. - - Metadata service manages metadata resources such as tables, - filesets and partitions. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - metadata_.Entity]: - r"""Return a callable for the create entity method over gRPC. - - Create a metadata entity. - - Returns: - Callable[[~.CreateEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entity' not in self._stubs: - self._stubs['create_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreateEntity', - request_serializer=metadata_.CreateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['create_entity'] - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - metadata_.Entity]: - r"""Return a callable for the update entity method over gRPC. - - Update a metadata entity. Only supports full resource - update. - - Returns: - Callable[[~.UpdateEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'update_entity' not in self._stubs: - self._stubs['update_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', - request_serializer=metadata_.UpdateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['update_entity'] - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entity method over gRPC. - - Delete a metadata entity. - - Returns: - Callable[[~.DeleteEntityRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entity' not in self._stubs: - self._stubs['delete_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', - request_serializer=metadata_.DeleteEntityRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entity'] - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - metadata_.Entity]: - r"""Return a callable for the get entity method over gRPC. - - Get a metadata entity. - - Returns: - Callable[[~.GetEntityRequest], - ~.Entity]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entity' not in self._stubs: - self._stubs['get_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetEntity', - request_serializer=metadata_.GetEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['get_entity'] - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - metadata_.ListEntitiesResponse]: - r"""Return a callable for the list entities method over gRPC. - - List metadata entities in a zone. - - Returns: - Callable[[~.ListEntitiesRequest], - ~.ListEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entities' not in self._stubs: - self._stubs['list_entities'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListEntities', - request_serializer=metadata_.ListEntitiesRequest.serialize, - response_deserializer=metadata_.ListEntitiesResponse.deserialize, - ) - return self._stubs['list_entities'] - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - metadata_.Partition]: - r"""Return a callable for the create partition method over gRPC. - - Create a metadata partition. - - Returns: - Callable[[~.CreatePartitionRequest], - ~.Partition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_partition' not in self._stubs: - self._stubs['create_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreatePartition', - request_serializer=metadata_.CreatePartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['create_partition'] - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete partition method over gRPC. - - Delete a metadata partition. - - Returns: - Callable[[~.DeletePartitionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_partition' not in self._stubs: - self._stubs['delete_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeletePartition', - request_serializer=metadata_.DeletePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_partition'] - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - metadata_.Partition]: - r"""Return a callable for the get partition method over gRPC. - - Get a metadata partition of an entity. - - Returns: - Callable[[~.GetPartitionRequest], - ~.Partition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_partition' not in self._stubs: - self._stubs['get_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetPartition', - request_serializer=metadata_.GetPartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['get_partition'] - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - metadata_.ListPartitionsResponse]: - r"""Return a callable for the list partitions method over gRPC. - - List metadata partitions of an entity. - - Returns: - Callable[[~.ListPartitionsRequest], - ~.ListPartitionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_partitions' not in self._stubs: - self._stubs['list_partitions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListPartitions', - request_serializer=metadata_.ListPartitionsRequest.serialize, - response_deserializer=metadata_.ListPartitionsResponse.deserialize, - ) - return self._stubs['list_partitions'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MetadataServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py deleted file mode 100644 index 2860f724048e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,796 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MetadataServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - 
request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata to a dict of str for logging. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class MetadataServiceGrpcAsyncIOTransport(MetadataServiceTransport): - """gRPC AsyncIO backend transport for MetadataService. - - Metadata service manages metadata resources such as tables, - filesets and partitions. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
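        Example (illustrative only; the keyword argument shown is a real
        parameter of this method)::

            channel = MetadataServiceGrpcAsyncIOTransport.create_channel(
                "dataplex.googleapis.com",
                quota_project_id="my-project",
            )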
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the create entity method over gRPC. - - Create a metadata entity. 
- - Returns: - Callable[[~.CreateEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entity' not in self._stubs: - self._stubs['create_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreateEntity', - request_serializer=metadata_.CreateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['create_entity'] - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the update entity method over gRPC. - - Update a metadata entity. Only supports full resource - update. - - Returns: - Callable[[~.UpdateEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entity' not in self._stubs: - self._stubs['update_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/UpdateEntity', - request_serializer=metadata_.UpdateEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['update_entity'] - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entity method over gRPC. - - Delete a metadata entity. - - Returns: - Callable[[~.DeleteEntityRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entity' not in self._stubs: - self._stubs['delete_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeleteEntity', - request_serializer=metadata_.DeleteEntityRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entity'] - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - Awaitable[metadata_.Entity]]: - r"""Return a callable for the get entity method over gRPC. - - Get a metadata entity. - - Returns: - Callable[[~.GetEntityRequest], - Awaitable[~.Entity]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entity' not in self._stubs: - self._stubs['get_entity'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetEntity', - request_serializer=metadata_.GetEntityRequest.serialize, - response_deserializer=metadata_.Entity.deserialize, - ) - return self._stubs['get_entity'] - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - Awaitable[metadata_.ListEntitiesResponse]]: - r"""Return a callable for the list entities method over gRPC. 
- - List metadata entities in a zone. - - Returns: - Callable[[~.ListEntitiesRequest], - Awaitable[~.ListEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entities' not in self._stubs: - self._stubs['list_entities'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListEntities', - request_serializer=metadata_.ListEntitiesRequest.serialize, - response_deserializer=metadata_.ListEntitiesResponse.deserialize, - ) - return self._stubs['list_entities'] - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - Awaitable[metadata_.Partition]]: - r"""Return a callable for the create partition method over gRPC. - - Create a metadata partition. - - Returns: - Callable[[~.CreatePartitionRequest], - Awaitable[~.Partition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_partition' not in self._stubs: - self._stubs['create_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/CreatePartition', - request_serializer=metadata_.CreatePartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['create_partition'] - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete partition method over gRPC. - - Delete a metadata partition. - - Returns: - Callable[[~.DeletePartitionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_partition' not in self._stubs: - self._stubs['delete_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/DeletePartition', - request_serializer=metadata_.DeletePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_partition'] - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - Awaitable[metadata_.Partition]]: - r"""Return a callable for the get partition method over gRPC. - - Get a metadata partition of an entity. - - Returns: - Callable[[~.GetPartitionRequest], - Awaitable[~.Partition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_partition' not in self._stubs: - self._stubs['get_partition'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/GetPartition', - request_serializer=metadata_.GetPartitionRequest.serialize, - response_deserializer=metadata_.Partition.deserialize, - ) - return self._stubs['get_partition'] - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - Awaitable[metadata_.ListPartitionsResponse]]: - r"""Return a callable for the list partitions method over gRPC. - - List metadata partitions of an entity. - - Returns: - Callable[[~.ListPartitionsRequest], - Awaitable[~.ListPartitionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_partitions' not in self._stubs: - self._stubs['list_partitions'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.MetadataService/ListPartitions', - request_serializer=metadata_.ListPartitionsRequest.serialize, - response_deserializer=metadata_.ListPartitionsResponse.deserialize, - ) - return self._stubs['list_partitions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_entity: self._wrap_method( - self.create_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.update_entity: self._wrap_method( - self.update_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_entity: self._wrap_method( - self.delete_entity, - default_timeout=60.0, - client_info=client_info, - ), - self.get_entity: self._wrap_method( - self.get_entity, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_entities: self._wrap_method( - self.list_entities, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_partition: self._wrap_method( - self.create_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_partition: self._wrap_method( - self.delete_partition, - default_timeout=60.0, - client_info=client_info, - ), - self.get_partition: self._wrap_method( - self.get_partition, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_partitions: self._wrap_method( - self.list_partitions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - 
self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'MetadataServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py deleted file mode 100644 index 617fb4ddec8d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest.py +++ /dev/null @@ -1,2403 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.dataplex_v1.types import metadata_ -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseMetadataServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class MetadataServiceRestInterceptor: - """Interceptor for MetadataService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetadataServiceRestTransport. - - .. 
code-block:: python - class MyCustomMetadataServiceInterceptor(MetadataServiceRestInterceptor): - def pre_create_entity(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_entity(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_entity(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_entity(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_entity(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_partitions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_partitions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_entity(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_entity(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MetadataServiceRestTransport(interceptor=MyCustomMetadataServiceInterceptor()) - client = MetadataServiceClient(transport=transport) - - - """ - def pre_create_entity(self, request: metadata_.CreateEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.CreateEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_entity - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_create_entity(self, response: metadata_.Entity) -> metadata_.Entity: - """Post-rpc interceptor for create_entity - - DEPRECATED. Please use the `post_create_entity_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_create_entity` interceptor runs - before the `post_create_entity_with_metadata` interceptor. - """ - return response - - def post_create_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_entity - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. 
- - We recommend only using this `post_create_entity_with_metadata` - interceptor in new development instead of the `post_create_entity` interceptor. - When both interceptors are used, this `post_create_entity_with_metadata` interceptor runs after the - `post_create_entity` interceptor. The (possibly modified) response returned by - `post_create_entity` will be passed to - `post_create_entity_with_metadata`. - """ - return response, metadata - - def pre_create_partition(self, request: metadata_.CreatePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.CreatePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_create_partition(self, response: metadata_.Partition) -> metadata_.Partition: - """Post-rpc interceptor for create_partition - - DEPRECATED. Please use the `post_create_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_create_partition` interceptor runs - before the `post_create_partition_with_metadata` interceptor. - """ - return response - - def post_create_partition_with_metadata(self, response: metadata_.Partition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Partition, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_create_partition_with_metadata` - interceptor in new development instead of the `post_create_partition` interceptor. - When both interceptors are used, this `post_create_partition_with_metadata` interceptor runs after the - `post_create_partition` interceptor. The (possibly modified) response returned by - `post_create_partition` will be passed to - `post_create_partition_with_metadata`. - """ - return response, metadata - - def pre_delete_entity(self, request: metadata_.DeleteEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.DeleteEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_entity - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def pre_delete_partition(self, request: metadata_.DeletePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.DeletePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def pre_get_entity(self, request: metadata_.GetEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.GetEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_entity - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. 
- """ - return request, metadata - - def post_get_entity(self, response: metadata_.Entity) -> metadata_.Entity: - """Post-rpc interceptor for get_entity - - DEPRECATED. Please use the `post_get_entity_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_get_entity` interceptor runs - before the `post_get_entity_with_metadata` interceptor. - """ - return response - - def post_get_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_entity - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_get_entity_with_metadata` - interceptor in new development instead of the `post_get_entity` interceptor. - When both interceptors are used, this `post_get_entity_with_metadata` interceptor runs after the - `post_get_entity` interceptor. The (possibly modified) response returned by - `post_get_entity` will be passed to - `post_get_entity_with_metadata`. - """ - return response, metadata - - def pre_get_partition(self, request: metadata_.GetPartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.GetPartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_get_partition(self, response: metadata_.Partition) -> metadata_.Partition: - """Post-rpc interceptor for get_partition - - DEPRECATED. Please use the `post_get_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_get_partition` interceptor runs - before the `post_get_partition_with_metadata` interceptor. - """ - return response - - def post_get_partition_with_metadata(self, response: metadata_.Partition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Partition, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_get_partition_with_metadata` - interceptor in new development instead of the `post_get_partition` interceptor. - When both interceptors are used, this `post_get_partition_with_metadata` interceptor runs after the - `post_get_partition` interceptor. The (possibly modified) response returned by - `post_get_partition` will be passed to - `post_get_partition_with_metadata`. - """ - return response, metadata - - def pre_list_entities(self, request: metadata_.ListEntitiesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListEntitiesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. 
- """ - return request, metadata - - def post_list_entities(self, response: metadata_.ListEntitiesResponse) -> metadata_.ListEntitiesResponse: - """Post-rpc interceptor for list_entities - - DEPRECATED. Please use the `post_list_entities_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_list_entities` interceptor runs - before the `post_list_entities_with_metadata` interceptor. - """ - return response - - def post_list_entities_with_metadata(self, response: metadata_.ListEntitiesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListEntitiesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_entities - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_list_entities_with_metadata` - interceptor in new development instead of the `post_list_entities` interceptor. - When both interceptors are used, this `post_list_entities_with_metadata` interceptor runs after the - `post_list_entities` interceptor. The (possibly modified) response returned by - `post_list_entities` will be passed to - `post_list_entities_with_metadata`. - """ - return response, metadata - - def pre_list_partitions(self, request: metadata_.ListPartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListPartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_partitions - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_list_partitions(self, response: metadata_.ListPartitionsResponse) -> metadata_.ListPartitionsResponse: - """Post-rpc interceptor for list_partitions - - DEPRECATED. Please use the `post_list_partitions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_list_partitions` interceptor runs - before the `post_list_partitions_with_metadata` interceptor. - """ - return response - - def post_list_partitions_with_metadata(self, response: metadata_.ListPartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.ListPartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_partitions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_list_partitions_with_metadata` - interceptor in new development instead of the `post_list_partitions` interceptor. - When both interceptors are used, this `post_list_partitions_with_metadata` interceptor runs after the - `post_list_partitions` interceptor. The (possibly modified) response returned by - `post_list_partitions` will be passed to - `post_list_partitions_with_metadata`. 
- """ - return response, metadata - - def pre_update_entity(self, request: metadata_.UpdateEntityRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.UpdateEntityRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_entity - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_update_entity(self, response: metadata_.Entity) -> metadata_.Entity: - """Post-rpc interceptor for update_entity - - DEPRECATED. Please use the `post_update_entity_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. This `post_update_entity` interceptor runs - before the `post_update_entity_with_metadata` interceptor. - """ - return response - - def post_update_entity_with_metadata(self, response: metadata_.Entity, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metadata_.Entity, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_entity - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetadataService server but before it is returned to user code. - - We recommend only using this `post_update_entity_with_metadata` - interceptor in new development instead of the `post_update_entity` interceptor. - When both interceptors are used, this `post_update_entity_with_metadata` interceptor runs after the - `post_update_entity` interceptor. The (possibly modified) response returned by - `post_update_entity` will be passed to - `post_update_entity_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. 
- """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetadataService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the MetadataService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class MetadataServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetadataServiceRestInterceptor - - -class MetadataServiceRestTransport(_BaseMetadataServiceRestTransport): - """REST backend synchronous transport for MetadataService. - - Metadata service manages metadata resources such as tables, - filesets and partitions. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetadataServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or MetadataServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateEntity(_BaseMetadataServiceRestTransport._BaseCreateEntity, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.CreateEntity") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metadata_.CreateEntityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.Entity: - r"""Call the create entity method over HTTP. - - Args: - request (~.metadata_.CreateEntityRequest): - The request object. Create a metadata entity request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.Entity: - Represents tables and fileset - metadata contained within a zone. 
- - """ - - http_options = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_http_options() - - request, metadata = self._interceptor.pre_create_entity(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_transcoded_request(http_options, request) - - body = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseCreateEntity._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CreateEntity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "CreateEntity", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._CreateEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.Entity() - pb_resp = metadata_.Entity.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_entity(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_entity_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.Entity.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.create_entity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "CreateEntity", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreatePartition(_BaseMetadataServiceRestTransport._BaseCreatePartition, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.CreatePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metadata_.CreatePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> metadata_.Partition: - r"""Call the create partition method over HTTP. - - Args: - request (~.metadata_.CreatePartitionRequest): - The request object. Create metadata partition request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.Partition: - Represents partition metadata - contained within entity instances. - - """ - - http_options = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_http_options() - - request, metadata = self._interceptor.pre_create_partition(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_transcoded_request(http_options, request) - - body = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseCreatePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CreatePartition", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "CreatePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._CreatePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.Partition() - pb_resp = metadata_.Partition.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.Partition.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.create_partition", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "CreatePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteEntity(_BaseMetadataServiceRestTransport._BaseDeleteEntity, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.DeleteEntity") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.DeleteEntityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete entity method over HTTP. - - Args: - request (~.metadata_.DeleteEntityRequest): - The request object. Delete a metadata entity request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
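The ``metadata`` parameter documented above is a sequence of key/value tuples. A small sketch (hypothetical key names, etag, and resource name, assuming a ``MetadataServiceClient`` instance named ``client``) of attaching both a string-valued entry and a ``-bin`` (bytes-valued) entry:

    client.delete_entity(
        request={
            "name": "projects/p/locations/l/lakes/lake/zones/z/entities/e",
            "etag": "abc123",  # hypothetical etag
        },
        metadata=(
            ("x-custom-audit-id", "run-42"),      # plain keys take str values
            ("x-custom-trace-bin", b"\x00\x2a"),  # keys ending in -bin take bytes
        ),
    )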
- """ - - http_options = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_http_options() - - request, metadata = self._interceptor.pre_delete_entity(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseDeleteEntity._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeleteEntity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "DeleteEntity", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._DeleteEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeletePartition(_BaseMetadataServiceRestTransport._BaseDeletePartition, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.DeletePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.DeletePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete partition method over HTTP. - - Args: - request (~.metadata_.DeletePartitionRequest): - The request object. Delete metadata partition request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_http_options() - - request, metadata = self._interceptor.pre_delete_partition(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseDeletePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeletePartition", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "DeletePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._DeletePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetEntity(_BaseMetadataServiceRestTransport._BaseGetEntity, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.GetEntity") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.GetEntityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.Entity: - r"""Call the get entity method over HTTP. - - Args: - request (~.metadata_.GetEntityRequest): - The request object. Get metadata entity request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.Entity: - Represents tables and fileset - metadata contained within a zone. 
- - """ - - http_options = _BaseMetadataServiceRestTransport._BaseGetEntity._get_http_options() - - request, metadata = self._interceptor.pre_get_entity(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseGetEntity._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseGetEntity._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetEntity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetEntity", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._GetEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.Entity() - pb_resp = metadata_.Entity.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_entity(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_entity_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.Entity.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.get_entity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetEntity", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetPartition(_BaseMetadataServiceRestTransport._BaseGetPartition, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.GetPartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.GetPartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.Partition: - r"""Call the get partition method over HTTP. - - Args: - request (~.metadata_.GetPartitionRequest): - The request object. 
Get metadata partition request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.Partition: - Represents partition metadata - contained within entity instances. - - """ - - http_options = _BaseMetadataServiceRestTransport._BaseGetPartition._get_http_options() - - request, metadata = self._interceptor.pre_get_partition(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseGetPartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseGetPartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetPartition", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetPartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._GetPartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.Partition() - pb_resp = metadata_.Partition.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.Partition.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.get_partition", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetPartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListEntities(_BaseMetadataServiceRestTransport._BaseListEntities, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.ListEntities") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.ListEntitiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.ListEntitiesResponse: - r"""Call the list entities method over HTTP. - - Args: - request (~.metadata_.ListEntitiesRequest): - The request object. List metadata entities request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.ListEntitiesResponse: - List metadata entities response. 
- """ - - http_options = _BaseMetadataServiceRestTransport._BaseListEntities._get_http_options() - - request, metadata = self._interceptor.pre_list_entities(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseListEntities._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseListEntities._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListEntities", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListEntities", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._ListEntities._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.ListEntitiesResponse() - pb_resp = metadata_.ListEntitiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_entities(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_entities_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.ListEntitiesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.list_entities", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListEntities", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListPartitions(_BaseMetadataServiceRestTransport._BaseListPartitions, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.ListPartitions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metadata_.ListPartitionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.ListPartitionsResponse: - r"""Call the list partitions method 
over HTTP. - - Args: - request (~.metadata_.ListPartitionsRequest): - The request object. List metadata partitions request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.ListPartitionsResponse: - List metadata partitions response. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseListPartitions._get_http_options() - - request, metadata = self._interceptor.pre_list_partitions(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseListPartitions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseListPartitions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListPartitions", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListPartitions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._ListPartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.ListPartitionsResponse() - pb_resp = metadata_.ListPartitionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_partitions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_partitions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.ListPartitionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListPartitions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateEntity(_BaseMetadataServiceRestTransport._BaseUpdateEntity, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.UpdateEntity") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metadata_.UpdateEntityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metadata_.Entity: - r"""Call the update entity method over HTTP. - - Args: - request (~.metadata_.UpdateEntityRequest): - The request object. Update a metadata entity request. - The existing entity will be fully - replaced by the entity in the request. - The entity ID is mutable. To modify the - ID, use the current entity ID in the - request URL and specify the new ID in - the request body. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metadata_.Entity: - Represents tables and fileset - metadata contained within a zone. 
- - """ - - http_options = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_http_options() - - request, metadata = self._interceptor.pre_update_entity(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_transcoded_request(http_options, request) - - body = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseUpdateEntity._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.UpdateEntity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "UpdateEntity", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._UpdateEntity._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metadata_.Entity() - pb_resp = metadata_.Entity.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_entity(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_entity_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metadata_.Entity.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceClient.update_entity", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "UpdateEntity", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_entity(self) -> Callable[ - [metadata_.CreateEntityRequest], - metadata_.Entity]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateEntity(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_partition(self) -> Callable[ - [metadata_.CreatePartitionRequest], - metadata_.Partition]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreatePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_entity(self) -> Callable[ - [metadata_.DeleteEntityRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteEntity(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_partition(self) -> Callable[ - [metadata_.DeletePartitionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeletePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_entity(self) -> Callable[ - [metadata_.GetEntityRequest], - metadata_.Entity]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetEntity(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_partition(self) -> Callable[ - [metadata_.GetPartitionRequest], - metadata_.Partition]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetPartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_entities(self) -> Callable[ - [metadata_.ListEntitiesRequest], - metadata_.ListEntitiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_partitions(self) -> Callable[ - [metadata_.ListPartitionsRequest], - metadata_.ListPartitionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListPartitions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_entity(self) -> Callable[ - [metadata_.UpdateEntityRequest], - metadata_.Entity]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateEntity(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseMetadataServiceRestTransport._BaseGetLocation, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseMetadataServiceRestTransport._BaseListLocations, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseMetadataServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseMetadataServiceRestTransport._BaseCancelOperation, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseMetadataServiceRestTransport._BaseDeleteOperation, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseMetadataServiceRestTransport._BaseGetOperation, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseMetadataServiceRestTransport._BaseListOperations, MetadataServiceRestStub): - def __hash__(self): - return hash("MetadataServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseMetadataServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseMetadataServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetadataServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.dataplex_v1.MetadataServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetadataServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.dataplex_v1.MetadataServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.dataplex.v1.MetadataService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetadataServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py deleted file mode 100644 index c61cbbe05b13..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py +++ /dev/null @@ -1,631 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.dataplex_v1.types import metadata_ -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseMetadataServiceRestTransport(MetadataServiceTransport): - """Base REST backend transport for MetadataService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'dataplex.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateEntity: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities', - 'body': 'entity', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.CreateEntityRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseCreateEntity._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreatePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if
k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions', - 'body': 'partition', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.CreatePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseCreatePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteEntity: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "etag" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.DeleteEntityRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseDeleteEntity._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeletePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.DeletePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseDeletePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetEntity: - def 
__hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.GetEntityRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseGetEntity._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetPartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.GetPartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseGetPartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListEntities: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "view" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.ListEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseListEntities._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - 
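# --- Editor's illustration (not part of the deleted sources) ---------------
# A minimal sketch of how the _Base* helper trio defined above is composed
# by the concrete REST transport into a single HTTP call. The function name
# `_call_list_entities` and the `session`/`host` parameters are hypothetical:
# `session` is assumed to be an authorized requests.Session and `host` the
# service endpoint, e.g. 'https://dataplex.googleapis.com'. The real
# generated transport also flattens query params and handles retries;
# this only shows the shape of the helper pipeline.

def _call_list_entities(session, host, request):
    base = _BaseMetadataServiceRestTransport._BaseListEntities
    # 1. The static HTTP rule for this RPC: verb plus URI template.
    http_options = base._get_http_options()
    # 2. Transcode: resolve the template against the request's `parent`
    #    field and split the message into path, query, and body parts.
    transcoded = base._get_transcoded_request(http_options, request)
    # 3. Serialize query params, back-filling unset required fields
    #    (here `view`) and forcing integer enum encoding via `$alt`.
    query_params = base._get_query_params_json(transcoded)
    return session.request(
        transcoded['method'],      # 'get' for ListEntities
        host + transcoded['uri'],  # template resolved to a concrete path
        params=query_params,
    )
# ---------------------------------------------------------------------------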
class _BaseListPartitions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.ListPartitionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseListPartitions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateEntity: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v1/{entity.name=projects/*/locations/*/lakes/*/zones/*/entities/*}', - 'body': 'entity', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metadata_.UpdateEntityRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetadataServiceRestTransport._BaseUpdateEntity._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, 
str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] 
- return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseMetadataServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py deleted file mode 100644 index 995e34a65b4a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/__init__.py +++ /dev/null @@ -1,484 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .analyze import ( - Content, - Environment, - Session, -) -from .business_glossary import ( - CreateGlossaryCategoryRequest, - CreateGlossaryRequest, - CreateGlossaryTermRequest, - DeleteGlossaryCategoryRequest, - DeleteGlossaryRequest, - DeleteGlossaryTermRequest, - GetGlossaryCategoryRequest, - GetGlossaryRequest, - GetGlossaryTermRequest, - Glossary, - GlossaryCategory, - GlossaryTerm, - ListGlossariesRequest, - ListGlossariesResponse, - ListGlossaryCategoriesRequest, - ListGlossaryCategoriesResponse, - ListGlossaryTermsRequest, - ListGlossaryTermsResponse, - UpdateGlossaryCategoryRequest, - UpdateGlossaryRequest, - UpdateGlossaryTermRequest, -) -from .catalog import ( - Aspect, - AspectSource, - AspectType, - CancelMetadataJobRequest, - CreateAspectTypeRequest, - CreateEntryGroupRequest, - CreateEntryLinkRequest, - CreateEntryRequest, - CreateEntryTypeRequest, - CreateMetadataJobRequest, - DeleteAspectTypeRequest, - DeleteEntryGroupRequest, - DeleteEntryLinkRequest, - DeleteEntryRequest, - DeleteEntryTypeRequest, - Entry, - EntryGroup, - EntryLink, - EntrySource, - EntryType, - GetAspectTypeRequest, - GetEntryGroupRequest, - GetEntryLinkRequest, - GetEntryRequest, - GetEntryTypeRequest, - GetMetadataJobRequest, - ImportItem, - ListAspectTypesRequest, - ListAspectTypesResponse, - ListEntriesRequest, - ListEntriesResponse, - ListEntryGroupsRequest, - ListEntryGroupsResponse, - ListEntryTypesRequest, - ListEntryTypesResponse, - ListMetadataJobsRequest, - ListMetadataJobsResponse, - LookupEntryRequest, - MetadataJob, - SearchEntriesRequest, - SearchEntriesResponse, - SearchEntriesResult, - UpdateAspectTypeRequest, - UpdateEntryGroupRequest, - UpdateEntryRequest, - UpdateEntryTypeRequest, - EntryView, - TransferStatus, -) -from .cmek import ( - CreateEncryptionConfigRequest, - DeleteEncryptionConfigRequest, - EncryptionConfig, - GetEncryptionConfigRequest, - ListEncryptionConfigsRequest, - ListEncryptionConfigsResponse, - UpdateEncryptionConfigRequest, -) -from .content import ( - CreateContentRequest, - DeleteContentRequest, - GetContentRequest, - 
ListContentRequest, - ListContentResponse, - UpdateContentRequest, -) -from .data_discovery import ( - DataDiscoveryResult, - DataDiscoverySpec, -) -from .data_profile import ( - DataProfileResult, - DataProfileSpec, -) -from .data_quality import ( - DataQualityColumnResult, - DataQualityDimension, - DataQualityDimensionResult, - DataQualityResult, - DataQualityRule, - DataQualityRuleResult, - DataQualitySpec, -) -from .data_taxonomy import ( - CreateDataAttributeBindingRequest, - CreateDataAttributeRequest, - CreateDataTaxonomyRequest, - DataAttribute, - DataAttributeBinding, - DataTaxonomy, - DeleteDataAttributeBindingRequest, - DeleteDataAttributeRequest, - DeleteDataTaxonomyRequest, - GetDataAttributeBindingRequest, - GetDataAttributeRequest, - GetDataTaxonomyRequest, - ListDataAttributeBindingsRequest, - ListDataAttributeBindingsResponse, - ListDataAttributesRequest, - ListDataAttributesResponse, - ListDataTaxonomiesRequest, - ListDataTaxonomiesResponse, - UpdateDataAttributeBindingRequest, - UpdateDataAttributeRequest, - UpdateDataTaxonomyRequest, -) -from .datascans import ( - CreateDataScanRequest, - DataScan, - DataScanJob, - DeleteDataScanRequest, - GenerateDataQualityRulesRequest, - GenerateDataQualityRulesResponse, - GetDataScanJobRequest, - GetDataScanRequest, - ListDataScanJobsRequest, - ListDataScanJobsResponse, - ListDataScansRequest, - ListDataScansResponse, - RunDataScanRequest, - RunDataScanResponse, - UpdateDataScanRequest, - DataScanType, -) -from .datascans_common import ( - DataScanCatalogPublishingStatus, -) -from .logs import ( - BusinessGlossaryEvent, - DataQualityScanRuleResult, - DataScanEvent, - DiscoveryEvent, - EntryLinkEvent, - GovernanceEvent, - JobEvent, - SessionEvent, -) -from .metadata_ import ( - CreateEntityRequest, - CreatePartitionRequest, - DeleteEntityRequest, - DeletePartitionRequest, - Entity, - GetEntityRequest, - GetPartitionRequest, - ListEntitiesRequest, - ListEntitiesResponse, - ListPartitionsRequest, - ListPartitionsResponse, - Partition, - Schema, - StorageAccess, - StorageFormat, - UpdateEntityRequest, - StorageSystem, -) -from .processing import ( - DataSource, - ScannedData, - Trigger, -) -from .resources import ( - Action, - Asset, - AssetStatus, - Lake, - Zone, - State, -) -from .security import ( - DataAccessSpec, - ResourceAccessSpec, -) -from .service import ( - CancelJobRequest, - CreateAssetRequest, - CreateEnvironmentRequest, - CreateLakeRequest, - CreateTaskRequest, - CreateZoneRequest, - DeleteAssetRequest, - DeleteEnvironmentRequest, - DeleteLakeRequest, - DeleteTaskRequest, - DeleteZoneRequest, - GetAssetRequest, - GetEnvironmentRequest, - GetJobRequest, - GetLakeRequest, - GetTaskRequest, - GetZoneRequest, - ListActionsResponse, - ListAssetActionsRequest, - ListAssetsRequest, - ListAssetsResponse, - ListEnvironmentsRequest, - ListEnvironmentsResponse, - ListJobsRequest, - ListJobsResponse, - ListLakeActionsRequest, - ListLakesRequest, - ListLakesResponse, - ListSessionsRequest, - ListSessionsResponse, - ListTasksRequest, - ListTasksResponse, - ListZoneActionsRequest, - ListZonesRequest, - ListZonesResponse, - OperationMetadata, - RunTaskRequest, - RunTaskResponse, - UpdateAssetRequest, - UpdateEnvironmentRequest, - UpdateLakeRequest, - UpdateTaskRequest, - UpdateZoneRequest, -) -from .tasks import ( - Job, - Task, -) - -__all__ = ( - 'Content', - 'Environment', - 'Session', - 'CreateGlossaryCategoryRequest', - 'CreateGlossaryRequest', - 'CreateGlossaryTermRequest', - 'DeleteGlossaryCategoryRequest', - 
'DeleteGlossaryRequest', - 'DeleteGlossaryTermRequest', - 'GetGlossaryCategoryRequest', - 'GetGlossaryRequest', - 'GetGlossaryTermRequest', - 'Glossary', - 'GlossaryCategory', - 'GlossaryTerm', - 'ListGlossariesRequest', - 'ListGlossariesResponse', - 'ListGlossaryCategoriesRequest', - 'ListGlossaryCategoriesResponse', - 'ListGlossaryTermsRequest', - 'ListGlossaryTermsResponse', - 'UpdateGlossaryCategoryRequest', - 'UpdateGlossaryRequest', - 'UpdateGlossaryTermRequest', - 'Aspect', - 'AspectSource', - 'AspectType', - 'CancelMetadataJobRequest', - 'CreateAspectTypeRequest', - 'CreateEntryGroupRequest', - 'CreateEntryLinkRequest', - 'CreateEntryRequest', - 'CreateEntryTypeRequest', - 'CreateMetadataJobRequest', - 'DeleteAspectTypeRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryLinkRequest', - 'DeleteEntryRequest', - 'DeleteEntryTypeRequest', - 'Entry', - 'EntryGroup', - 'EntryLink', - 'EntrySource', - 'EntryType', - 'GetAspectTypeRequest', - 'GetEntryGroupRequest', - 'GetEntryLinkRequest', - 'GetEntryRequest', - 'GetEntryTypeRequest', - 'GetMetadataJobRequest', - 'ImportItem', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'LookupEntryRequest', - 'MetadataJob', - 'SearchEntriesRequest', - 'SearchEntriesResponse', - 'SearchEntriesResult', - 'UpdateAspectTypeRequest', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateEntryTypeRequest', - 'EntryView', - 'TransferStatus', - 'CreateEncryptionConfigRequest', - 'DeleteEncryptionConfigRequest', - 'EncryptionConfig', - 'GetEncryptionConfigRequest', - 'ListEncryptionConfigsRequest', - 'ListEncryptionConfigsResponse', - 'UpdateEncryptionConfigRequest', - 'CreateContentRequest', - 'DeleteContentRequest', - 'GetContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'UpdateContentRequest', - 'DataDiscoveryResult', - 'DataDiscoverySpec', - 'DataProfileResult', - 'DataProfileSpec', - 'DataQualityColumnResult', - 'DataQualityDimension', - 'DataQualityDimensionResult', - 'DataQualityResult', - 'DataQualityRule', - 'DataQualityRuleResult', - 'DataQualitySpec', - 'CreateDataAttributeBindingRequest', - 'CreateDataAttributeRequest', - 'CreateDataTaxonomyRequest', - 'DataAttribute', - 'DataAttributeBinding', - 'DataTaxonomy', - 'DeleteDataAttributeBindingRequest', - 'DeleteDataAttributeRequest', - 'DeleteDataTaxonomyRequest', - 'GetDataAttributeBindingRequest', - 'GetDataAttributeRequest', - 'GetDataTaxonomyRequest', - 'ListDataAttributeBindingsRequest', - 'ListDataAttributeBindingsResponse', - 'ListDataAttributesRequest', - 'ListDataAttributesResponse', - 'ListDataTaxonomiesRequest', - 'ListDataTaxonomiesResponse', - 'UpdateDataAttributeBindingRequest', - 'UpdateDataAttributeRequest', - 'UpdateDataTaxonomyRequest', - 'CreateDataScanRequest', - 'DataScan', - 'DataScanJob', - 'DeleteDataScanRequest', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'GetDataScanJobRequest', - 'GetDataScanRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 'UpdateDataScanRequest', - 'DataScanType', - 'DataScanCatalogPublishingStatus', - 'BusinessGlossaryEvent', - 'DataQualityScanRuleResult', - 'DataScanEvent', - 'DiscoveryEvent', - 'EntryLinkEvent', - 'GovernanceEvent', - 
'JobEvent', - 'SessionEvent', - 'CreateEntityRequest', - 'CreatePartitionRequest', - 'DeleteEntityRequest', - 'DeletePartitionRequest', - 'Entity', - 'GetEntityRequest', - 'GetPartitionRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'ListPartitionsRequest', - 'ListPartitionsResponse', - 'Partition', - 'Schema', - 'StorageAccess', - 'StorageFormat', - 'UpdateEntityRequest', - 'StorageSystem', - 'DataSource', - 'ScannedData', - 'Trigger', - 'Action', - 'Asset', - 'AssetStatus', - 'Lake', - 'Zone', - 'State', - 'DataAccessSpec', - 'ResourceAccessSpec', - 'CancelJobRequest', - 'CreateAssetRequest', - 'CreateEnvironmentRequest', - 'CreateLakeRequest', - 'CreateTaskRequest', - 'CreateZoneRequest', - 'DeleteAssetRequest', - 'DeleteEnvironmentRequest', - 'DeleteLakeRequest', - 'DeleteTaskRequest', - 'DeleteZoneRequest', - 'GetAssetRequest', - 'GetEnvironmentRequest', - 'GetJobRequest', - 'GetLakeRequest', - 'GetTaskRequest', - 'GetZoneRequest', - 'ListActionsResponse', - 'ListAssetActionsRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListLakeActionsRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'ListZoneActionsRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'OperationMetadata', - 'RunTaskRequest', - 'RunTaskResponse', - 'UpdateAssetRequest', - 'UpdateEnvironmentRequest', - 'UpdateLakeRequest', - 'UpdateTaskRequest', - 'UpdateZoneRequest', - 'Job', - 'Task', -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py deleted file mode 100644 index 93e0598c390f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/analyze.py +++ /dev/null @@ -1,492 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import resources -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'Environment', - 'Content', - 'Session', - }, -) - - -class Environment(proto.Message): - r"""Environment represents a user-visible compute infrastructure - for analytics within a lake. - - Attributes: - name (str): - Output only. The relative resource name of the environment, - of the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the environment. 
This ID will be - different if the environment is deleted and - re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Environment creation time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the environment - was last updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the - environment. - description (str): - Optional. Description of the environment. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the - environment. - infrastructure_spec (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec): - Required. Infrastructure specification for - the Environment. - session_spec (google.cloud.dataplex_v1.types.Environment.SessionSpec): - Optional. Configuration for sessions created - for this environment. - session_status (google.cloud.dataplex_v1.types.Environment.SessionStatus): - Output only. Status of sessions created for - this environment. - endpoints (google.cloud.dataplex_v1.types.Environment.Endpoints): - Output only. URI Endpoints to access sessions - associated with the Environment. - """ - - class InfrastructureSpec(proto.Message): - r"""Configuration for the underlying infrastructure used to run - workloads. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - compute (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.ComputeResources): - Optional. Compute resources needed for - analyze interactive workloads. - - This field is a member of `oneof`_ ``resources``. - os_image (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime): - Required. Software Runtime Configuration for - analyze interactive workloads. - - This field is a member of `oneof`_ ``runtime``. - """ - - class ComputeResources(proto.Message): - r"""Compute resources associated with the analyze interactive - workloads. - - Attributes: - disk_size_gb (int): - Optional. Size in GB of the disk. Default is - 100 GB. - node_count (int): - Optional. Total number of nodes in the - sessions created for this environment. - max_node_count (int): - Optional. Max configurable nodes. If max_node_count > - node_count, then auto-scaling is enabled. - """ - - disk_size_gb: int = proto.Field( - proto.INT32, - number=1, - ) - node_count: int = proto.Field( - proto.INT32, - number=2, - ) - max_node_count: int = proto.Field( - proto.INT32, - number=3, - ) - - class OsImageRuntime(proto.Message): - r"""Software Runtime Configuration to run Analyze. - - Attributes: - image_version (str): - Required. Dataplex Universal Catalog Image - version. - java_libraries (MutableSequence[str]): - Optional. List of Java jars to be included in - the runtime environment. Valid input includes - Cloud Storage URIs to Jar binaries. For example, - gs://bucket-name/my/path/to/file.jar - python_packages (MutableSequence[str]): - Optional. A list of python packages to be - installed. Valid formats include Cloud Storage - URI to a PIP installable library. For example, - gs://bucket-name/my/path/to/lib.tar.gz - properties (MutableMapping[str, str]): - Optional. Spark properties to provide configuration for use - in sessions created for this environment. The properties to - set on daemon config files. Property keys are specified in - ``prefix:property`` format. The prefix must be "spark". 
- """ - - image_version: str = proto.Field( - proto.STRING, - number=1, - ) - java_libraries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_packages: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - compute: 'Environment.InfrastructureSpec.ComputeResources' = proto.Field( - proto.MESSAGE, - number=50, - oneof='resources', - message='Environment.InfrastructureSpec.ComputeResources', - ) - os_image: 'Environment.InfrastructureSpec.OsImageRuntime' = proto.Field( - proto.MESSAGE, - number=100, - oneof='runtime', - message='Environment.InfrastructureSpec.OsImageRuntime', - ) - - class SessionSpec(proto.Message): - r"""Configuration for sessions created for this environment. - - Attributes: - max_idle_duration (google.protobuf.duration_pb2.Duration): - Optional. The idle time configuration of the - session. The session will be auto-terminated at - the end of this period. - enable_fast_startup (bool): - Optional. If True, this causes sessions to be - pre-created and available for faster startup to - enable interactive exploration use-cases. This - defaults to False to avoid additional billed - charges. These can only be set to True for the - environment with name set to "default", and with - default configuration. - """ - - max_idle_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - enable_fast_startup: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class SessionStatus(proto.Message): - r"""Status of sessions created for this environment. - - Attributes: - active (bool): - Output only. Queries over sessions to mark - whether the environment is currently active or - not - """ - - active: bool = proto.Field( - proto.BOOL, - number=1, - ) - - class Endpoints(proto.Message): - r"""URI Endpoints to access sessions associated with the - Environment. - - Attributes: - notebooks (str): - Output only. URI to serve notebook APIs - sql (str): - Output only. URI to serve SQL APIs - """ - - notebooks: str = proto.Field( - proto.STRING, - number=1, - ) - sql: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=8, - enum=resources.State, - ) - infrastructure_spec: InfrastructureSpec = proto.Field( - proto.MESSAGE, - number=100, - message=InfrastructureSpec, - ) - session_spec: SessionSpec = proto.Field( - proto.MESSAGE, - number=101, - message=SessionSpec, - ) - session_status: SessionStatus = proto.Field( - proto.MESSAGE, - number=102, - message=SessionStatus, - ) - endpoints: Endpoints = proto.Field( - proto.MESSAGE, - number=200, - message=Endpoints, - ) - - -class Content(proto.Message): - r"""Content represents a user-visible notebook or a sql script - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the content, of - the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - uid (str): - Output only. System generated globally unique - ID for the content. This ID will be different if - the content is deleted and re-created with the - same name. - path (str): - Required. The path for the Content file, - represented as directory structure. Unique - within a lake. Limited to alphanumerics, - hyphens, underscores, dots and slashes. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Content creation time. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the content was - last updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the - content. - description (str): - Optional. Description of the content. - data_text (str): - Required. Content data in string format. - - This field is a member of `oneof`_ ``data``. - sql_script (google.cloud.dataplex_v1.types.Content.SqlScript): - Sql Script related configurations. - - This field is a member of `oneof`_ ``content``. - notebook (google.cloud.dataplex_v1.types.Content.Notebook): - Notebook related configurations. - - This field is a member of `oneof`_ ``content``. - """ - - class SqlScript(proto.Message): - r"""Configuration for the Sql Script content. - - Attributes: - engine (google.cloud.dataplex_v1.types.Content.SqlScript.QueryEngine): - Required. Query Engine to be used for the Sql - Query. - """ - class QueryEngine(proto.Enum): - r"""Query Engine Type of the SQL Script. - - Values: - QUERY_ENGINE_UNSPECIFIED (0): - Value was unspecified. - SPARK (2): - Spark SQL Query. - """ - QUERY_ENGINE_UNSPECIFIED = 0 - SPARK = 2 - - engine: 'Content.SqlScript.QueryEngine' = proto.Field( - proto.ENUM, - number=1, - enum='Content.SqlScript.QueryEngine', - ) - - class Notebook(proto.Message): - r"""Configuration for Notebook content. - - Attributes: - kernel_type (google.cloud.dataplex_v1.types.Content.Notebook.KernelType): - Required. Kernel Type of the notebook. - """ - class KernelType(proto.Enum): - r"""Kernel Type of the Jupyter notebook. - - Values: - KERNEL_TYPE_UNSPECIFIED (0): - Kernel Type unspecified. - PYTHON3 (1): - Python 3 Kernel. 
- """ - KERNEL_TYPE_UNSPECIFIED = 0 - PYTHON3 = 1 - - kernel_type: 'Content.Notebook.KernelType' = proto.Field( - proto.ENUM, - number=1, - enum='Content.Notebook.KernelType', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - path: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - data_text: str = proto.Field( - proto.STRING, - number=9, - oneof='data', - ) - sql_script: SqlScript = proto.Field( - proto.MESSAGE, - number=100, - oneof='content', - message=SqlScript, - ) - notebook: Notebook = proto.Field( - proto.MESSAGE, - number=101, - oneof='content', - message=Notebook, - ) - - -class Session(proto.Message): - r"""Represents an active analyze session running for a user. - - Attributes: - name (str): - Output only. The relative resource name of the content, of - the form: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}/sessions/{session_id} - user_id (str): - Output only. Email of user running the - session. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Session start time. - state (google.cloud.dataplex_v1.types.State): - Output only. State of Session - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=4, - enum=resources.State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py deleted file mode 100644 index 7d114d02aab3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/catalog.py +++ /dev/null @@ -1,3079 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'EntryView', - 'TransferStatus', - 'AspectType', - 'EntryGroup', - 'EntryType', - 'Aspect', - 'AspectSource', - 'Entry', - 'EntrySource', - 'CreateEntryGroupRequest', - 'UpdateEntryGroupRequest', - 'DeleteEntryGroupRequest', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'GetEntryGroupRequest', - 'CreateEntryTypeRequest', - 'UpdateEntryTypeRequest', - 'DeleteEntryTypeRequest', - 'ListEntryTypesRequest', - 'ListEntryTypesResponse', - 'GetEntryTypeRequest', - 'CreateAspectTypeRequest', - 'UpdateAspectTypeRequest', - 'DeleteAspectTypeRequest', - 'ListAspectTypesRequest', - 'ListAspectTypesResponse', - 'GetAspectTypeRequest', - 'CreateEntryRequest', - 'UpdateEntryRequest', - 'DeleteEntryRequest', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'GetEntryRequest', - 'LookupEntryRequest', - 'SearchEntriesRequest', - 'SearchEntriesResult', - 'SearchEntriesResponse', - 'ImportItem', - 'CreateMetadataJobRequest', - 'GetMetadataJobRequest', - 'ListMetadataJobsRequest', - 'ListMetadataJobsResponse', - 'CancelMetadataJobRequest', - 'MetadataJob', - 'EntryLink', - 'CreateEntryLinkRequest', - 'DeleteEntryLinkRequest', - 'GetEntryLinkRequest', - }, -) - - -class EntryView(proto.Enum): - r"""View for controlling which parts of an entry are to be - returned. - - Values: - ENTRY_VIEW_UNSPECIFIED (0): - Unspecified EntryView. Defaults to FULL. - BASIC (1): - Returns entry only, without aspects. - FULL (2): - Returns all required aspects as well as the - keys of all non-required aspects. - CUSTOM (3): - Returns aspects matching custom fields in - GetEntryRequest. If the number of aspects - exceeds 100, the first 100 will be returned. - ALL (4): - Returns all aspects. If the number of aspects - exceeds 100, the first 100 will be returned. - """ - ENTRY_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - CUSTOM = 3 - ALL = 4 - - -class TransferStatus(proto.Enum): - r"""Denotes the transfer status of a resource. It is unspecified - for resources created from Dataplex API. - - Values: - TRANSFER_STATUS_UNSPECIFIED (0): - The default value. It is set for resources - that were not subject for migration from Data - Catalog service. - TRANSFER_STATUS_MIGRATED (1): - Indicates that a resource was migrated from - Data Catalog service but it hasn't been - transferred yet. In particular the resource - cannot be updated from Dataplex API. - TRANSFER_STATUS_TRANSFERRED (2): - Indicates that a resource was transferred - from Data Catalog service. The resource can only - be updated from Dataplex API. - """ - TRANSFER_STATUS_UNSPECIFIED = 0 - TRANSFER_STATUS_MIGRATED = 1 - TRANSFER_STATUS_TRANSFERRED = 2 - - -class AspectType(proto.Message): - r"""AspectType is a template for creating Aspects, and represents - the JSON-schema for a given Entry, for example, BigQuery Table - Schema. - - Attributes: - name (str): - Output only. The relative resource name of the AspectType, - of the form: - projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. - uid (str): - Output only. System generated globally unique - ID for the AspectType. 
If you delete and - recreate the AspectType with the same name, then - this ID will be different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the AspectType was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the AspectType was - last updated. - description (str): - Optional. Description of the AspectType. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - AspectType. - etag (str): - The service computes this checksum. The - client may send it on update and delete requests - to ensure it has an up-to-date value before - proceeding. - authorization (google.cloud.dataplex_v1.types.AspectType.Authorization): - Immutable. Defines the Authorization for this - type. - metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Required. MetadataTemplate of the aspect. - transfer_status (google.cloud.dataplex_v1.types.TransferStatus): - Output only. Denotes the transfer status of - the Aspect Type. It is unspecified for Aspect - Types created from Dataplex API. - """ - - class Authorization(proto.Message): - r"""Authorization for an AspectType. - - Attributes: - alternate_use_permission (str): - Immutable. The IAM permission grantable on - the EntryGroup to allow access to instantiate - Aspects of Dataplex Universal Catalog owned - AspectTypes, only settable for Dataplex - Universal Catalog owned Types. - """ - - alternate_use_permission: str = proto.Field( - proto.STRING, - number=1, - ) - - class MetadataTemplate(proto.Message): - r"""MetadataTemplate definition for an AspectType. - - Attributes: - index (int): - Optional. Index is used to encode Template - messages. The value of index can range between 1 - and 2,147,483,647. Index must be unique within - all fields in a Template. (Nested Templates can - reuse indexes). Once a Template is defined, the - index cannot be changed, because it identifies - the field in the actual storage format. Index is - a mandatory field, but it is optional for top - level fields, and map/array "values" - definitions. - name (str): - Required. The name of the field. - type_ (str): - Required. The datatype of this field. The following values - are supported: - - Primitive types: - - - string - - int - - bool - - double - - datetime. Must be of the format RFC3339 UTC "Zulu" - (Examples: "2014-10-02T15:01:23Z" and - "2014-10-02T15:01:23.045123456Z"). - - Complex types: - - - enum - - array - - map - - record - record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]): - Optional. Field definition. You must specify - it if the type is record. It defines the nested - fields. - enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]): - Optional. The list of values for an enum - type. You must define it if the type is enum. - map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. If the type is map, set map_items. map_items can - refer to a primitive field or a complex (record only) field. - To specify a primitive field, you only need to set name and - type in the nested MetadataTemplate. The recommended value - for the name field is item, as this isn't used in the actual - payload. - array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. If the type is array, set array_items. 
array_items - can refer to a primitive field or a complex (record only) - field. To specify a primitive field, you only need to set - name and type in the nested MetadataTemplate. The - recommended value for the name field is item, as this isn't - used in the actual payload. - type_id (str): - Optional. You can use type id if this - definition of the field needs to be reused - later. The type id must be unique across the - entire template. You can only specify it if the - field type is record. - type_ref (str): - Optional. A reference to another field - definition (not an inline definition). The value - must be equal to the value of an id field - defined elsewhere in the MetadataTemplate. Only - fields with record type can refer to other - fields. - constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): - Optional. Specifies the constraints on this - field. - annotations (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Annotations): - Optional. Specifies annotations on this - field. - """ - - class EnumValue(proto.Message): - r"""Definition of Enumvalue, to be used for enum fields. - - Attributes: - index (int): - Required. Index for the enum value. It can't - be modified. - name (str): - Required. Name of the enumvalue. This is the - actual value that the aspect can contain. - deprecated (str): - Optional. You can set this message if you - need to deprecate an enum value. - """ - - index: int = proto.Field( - proto.INT32, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - deprecated: str = proto.Field( - proto.STRING, - number=3, - ) - - class Constraints(proto.Message): - r"""Definition of the constraints of a field. - - Attributes: - required (bool): - Optional. Marks this field as optional or - required. - """ - - required: bool = proto.Field( - proto.BOOL, - number=1, - ) - - class Annotations(proto.Message): - r"""Definition of the annotations of a field. - - Attributes: - deprecated (str): - Optional. Marks a field as deprecated. You - can include a deprecation message. - display_name (str): - Optional. Display name for a field. - description (str): - Optional. Description for a field. - display_order (int): - Optional. Display order for a field. You can - use this to reorder where a field is rendered. - string_type (str): - Optional. You can use String Type annotations to specify - special meaning to string fields. The following values are - supported: - - - richText: The field must be interpreted as a rich text - field. - - url: A fully qualified URL link. - - resource: A service qualified resource reference. - string_values (MutableSequence[str]): - Optional. Suggested hints for string fields. - You can use them to suggest values to users - through console. 
- """ - - deprecated: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - display_order: int = proto.Field( - proto.INT32, - number=4, - ) - string_type: str = proto.Field( - proto.STRING, - number=6, - ) - string_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - - index: int = proto.Field( - proto.INT32, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=5, - ) - record_fields: MutableSequence['AspectType.MetadataTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AspectType.MetadataTemplate', - ) - enum_values: MutableSequence['AspectType.MetadataTemplate.EnumValue'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='AspectType.MetadataTemplate.EnumValue', - ) - map_items: 'AspectType.MetadataTemplate' = proto.Field( - proto.MESSAGE, - number=10, - message='AspectType.MetadataTemplate', - ) - array_items: 'AspectType.MetadataTemplate' = proto.Field( - proto.MESSAGE, - number=11, - message='AspectType.MetadataTemplate', - ) - type_id: str = proto.Field( - proto.STRING, - number=12, - ) - type_ref: str = proto.Field( - proto.STRING, - number=13, - ) - constraints: 'AspectType.MetadataTemplate.Constraints' = proto.Field( - proto.MESSAGE, - number=50, - message='AspectType.MetadataTemplate.Constraints', - ) - annotations: 'AspectType.MetadataTemplate.Annotations' = proto.Field( - proto.MESSAGE, - number=51, - message='AspectType.MetadataTemplate.Annotations', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - authorization: Authorization = proto.Field( - proto.MESSAGE, - number=52, - message=Authorization, - ) - metadata_template: MetadataTemplate = proto.Field( - proto.MESSAGE, - number=53, - message=MetadataTemplate, - ) - transfer_status: 'TransferStatus' = proto.Field( - proto.ENUM, - number=202, - enum='TransferStatus', - ) - - -class EntryGroup(proto.Message): - r"""An Entry Group represents a logical grouping of one or more - Entries. - - Attributes: - name (str): - Output only. The relative resource name of the EntryGroup, - in the format - projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}. - uid (str): - Output only. System generated globally unique - ID for the EntryGroup. If you delete and - recreate the EntryGroup with the same name, this - ID will be different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryGroup was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryGroup was - last updated. - description (str): - Optional. Description of the EntryGroup. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. 
User-defined labels for the - EntryGroup. - etag (str): - This checksum is computed by the service, and - might be sent on update and delete requests to - ensure the client has an up-to-date value before - proceeding. - transfer_status (google.cloud.dataplex_v1.types.TransferStatus): - Output only. Denotes the transfer status of - the Entry Group. It is unspecified for Entry - Group created from Dataplex API. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - transfer_status: 'TransferStatus' = proto.Field( - proto.ENUM, - number=202, - enum='TransferStatus', - ) - - -class EntryType(proto.Message): - r"""Entry Type is a template for creating Entries. - - Attributes: - name (str): - Output only. The relative resource name of the EntryType, of - the form: - projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}. - uid (str): - Output only. System generated globally unique - ID for the EntryType. This ID will be different - if the EntryType is deleted and re-created with - the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryType was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the EntryType was - last updated. - description (str): - Optional. Description of the EntryType. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - EntryType. - etag (str): - Optional. This checksum is computed by the - service, and might be sent on update and delete - requests to ensure the client has an up-to-date - value before proceeding. - type_aliases (MutableSequence[str]): - Optional. Indicates the classes this Entry - Type belongs to, for example, TABLE, DATABASE, - MODEL. - platform (str): - Optional. The platform that Entries of this - type belongs to. - system (str): - Optional. The system that Entries of this - type belongs to. Examples include CloudSQL, - MariaDB etc - required_aspects (MutableSequence[google.cloud.dataplex_v1.types.EntryType.AspectInfo]): - AspectInfo for the entry type. - authorization (google.cloud.dataplex_v1.types.EntryType.Authorization): - Immutable. Authorization defined for this - type. - """ - - class AspectInfo(proto.Message): - r""" - - Attributes: - type_ (str): - Required aspect type for the entry type. - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - - class Authorization(proto.Message): - r"""Authorization for an Entry Type. - - Attributes: - alternate_use_permission (str): - Immutable. The IAM permission grantable on - the Entry Group to allow access to instantiate - Entries of Dataplex Universal Catalog owned - Entry Types, only settable for Dataplex - Universal Catalog owned Types. 
- """ - - alternate_use_permission: str = proto.Field( - proto.STRING, - number=1, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - type_aliases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - platform: str = proto.Field( - proto.STRING, - number=10, - ) - system: str = proto.Field( - proto.STRING, - number=11, - ) - required_aspects: MutableSequence[AspectInfo] = proto.RepeatedField( - proto.MESSAGE, - number=50, - message=AspectInfo, - ) - authorization: Authorization = proto.Field( - proto.MESSAGE, - number=51, - message=Authorization, - ) - - -class Aspect(proto.Message): - r"""An aspect is a single piece of metadata describing an entry. - - Attributes: - aspect_type (str): - Output only. The resource name of the type - used to create this Aspect. - path (str): - Output only. The path in the entry under - which the aspect is attached. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Aspect was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Aspect was - last updated. - data (google.protobuf.struct_pb2.Struct): - Required. The content of the aspect, - according to its aspect type schema. The maximum - size of the field is 120KB (encoded as UTF-8). - aspect_source (google.cloud.dataplex_v1.types.AspectSource): - Optional. Information related to the source - system of the aspect. - """ - - aspect_type: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - data: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=8, - message=struct_pb2.Struct, - ) - aspect_source: 'AspectSource' = proto.Field( - proto.MESSAGE, - number=9, - message='AspectSource', - ) - - -class AspectSource(proto.Message): - r"""Information related to the source system of the aspect. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the aspect was created in the source - system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time the aspect was last updated in the - source system. - data_version (str): - The version of the data format used to - produce this data. This field is used to - indicated when the underlying data format - changes (e.g., schema modifications, changes to - the source URL format definition, etc). 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - data_version: str = proto.Field( - proto.STRING, - number=12, - ) - - -class Entry(proto.Message): - r"""An entry is a representation of a data resource that can be - described by various metadata. - - Attributes: - name (str): - Identifier. The relative resource name of the entry, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. - entry_type (str): - Required. Immutable. The relative resource name of the entry - type that was used to create this entry, in the format - ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entry was - created in Dataplex Universal Catalog. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entry was last - updated in Dataplex Universal Catalog. - aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): - Optional. The aspects that are attached to the entry. - Depending on how the aspect is attached to the entry, the - format of the aspect key can be one of the following: - - - If the aspect is attached directly to the entry: - ``{project_id_or_number}.{location_id}.{aspect_type_id}`` - - If the aspect is attached to an entry's path: - ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` - parent_entry (str): - Optional. Immutable. The resource name of the parent entry, - in the format - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. - fully_qualified_name (str): - Optional. A name for the entry that can be referenced by an - external system. For more information, see `Fully qualified - names `__. - The maximum size of the field is 4000 characters. - entry_source (google.cloud.dataplex_v1.types.EntrySource): - Optional. Information related to the source - system of the data resource that is represented - by the entry. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - entry_type: str = proto.Field( - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - aspects: MutableMapping[str, 'Aspect'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=9, - message='Aspect', - ) - parent_entry: str = proto.Field( - proto.STRING, - number=10, - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=12, - ) - entry_source: 'EntrySource' = proto.Field( - proto.MESSAGE, - number=15, - message='EntrySource', - ) - - -class EntrySource(proto.Message): - r"""Information related to the source system of the data resource - that is represented by the entry. - - Attributes: - resource (str): - The name of the resource in the source - system. Maximum length is 4,000 characters. - system (str): - The name of the source system. - Maximum length is 64 characters. - platform (str): - The platform containing the source system. - Maximum length is 64 characters. - display_name (str): - A user-friendly display name. - Maximum length is 500 characters. 
- description (str): - A description of the data resource. - Maximum length is 2,000 characters. - labels (MutableMapping[str, str]): - User-defined labels. - The maximum size of keys and values is 128 - characters each. - ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): - Immutable. The entries representing the - ancestors of the data resource in the source - system. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the resource was created in the - source system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the resource was last updated in the source - system. If the entry exists in the system and its - ``EntrySource`` has ``update_time`` populated, further - updates to the ``EntrySource`` of the entry must provide - incremental updates to its ``update_time``. - location (str): - Output only. Location of the resource in the - source system. You can search the entry by this - location. By default, this should match the - location of the entry group containing this - entry. A different value allows capturing the - source location for data external to Google - Cloud. - """ - - class Ancestor(proto.Message): - r"""Information about individual items in the hierarchy that is - associated with the data resource. - - Attributes: - name (str): - Optional. The name of the ancestor resource. - type_ (str): - Optional. The type of the ancestor resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - system: str = proto.Field( - proto.STRING, - number=2, - ) - platform: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - description: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - ancestors: MutableSequence[Ancestor] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=Ancestor, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - location: str = proto.Field( - proto.STRING, - number=12, - ) - - -class CreateEntryGroupRequest(proto.Message): - r"""Create EntryGroup Request. - - Attributes: - parent (str): - Required. The resource name of the entryGroup, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - entry_group_id (str): - Required. EntryGroup identifier. - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_group_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=3, - message='EntryGroup', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEntryGroupRequest(proto.Message): - r"""Update EntryGroup Request. - - Attributes: - entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource. 
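# Illustrative sketch only: an Entry with one directly attached aspect. The
# aspects map key follows the documented
# "{project_id_or_number}.{location_id}.{aspect_type_id}" format; every
# identifier below is a placeholder.
from google.cloud import dataplex_v1

entry = dataplex_v1.Entry(
    entry_type="projects/example-project/locations/global/entryTypes/example-type",
    entry_source=dataplex_v1.EntrySource(system="MySQL", resource="db1.table1"),
    aspects={
        "example-project.global.example-aspect-type": dataplex_v1.Aspect(
            data={"owner": "data-team"},
        ),
    },
)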
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - validate_only (bool): - Optional. The service validates the request, - without performing any mutations. The default is - false. - """ - - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEntryGroupRequest(proto.Message): - r"""Delete EntryGroup Request. - - Attributes: - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteEntryGroupRequest method returns an - ABORTED error response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEntryGroupsRequest(proto.Message): - r"""List entryGroups request. - - Attributes: - parent (str): - Required. The resource name of the entryGroup location, of - the form: - ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of EntryGroups to - return. The service may return fewer than this - value. If unspecified, the service returns at - most 10 EntryGroups. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEntryGroups`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters you - provide to ``ListEntryGroups`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEntryGroupsResponse(proto.Message): - r"""List entry groups response. - - Attributes: - entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): - Entry groups under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetEntryGroupRequest(proto.Message): - r"""Get EntryGroup request. - - Attributes: - name (str): - Required. The resource name of the EntryGroup: - ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateEntryTypeRequest(proto.Message): - r"""Create EntryType Request. 
- - Attributes: - parent (str): - Required. The resource name of the EntryType, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - entry_type_id (str): - Required. EntryType identifier. - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_type_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry_type: 'EntryType' = proto.Field( - proto.MESSAGE, - number=3, - message='EntryType', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEntryTypeRequest(proto.Message): - r"""Update EntryType Request. - - Attributes: - entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - entry_type: 'EntryType' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryType', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEntryTypeRequest(proto.Message): - r"""Delete EntryType Request. - - Attributes: - name (str): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteEntryTypeRequest method returns an ABORTED - error response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEntryTypesRequest(proto.Message): - r"""List EntryTypes request - - Attributes: - parent (str): - Required. The resource name of the EntryType location, of - the form: - ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of EntryTypes to - return. The service may return fewer than this - value. If unspecified, the service returns at - most 10 EntryTypes. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEntryTypes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters you - provided to ``ListEntryTypes`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - service supports the following formats: - - - labels.key1 = "value1" - - labels:key1 - - name = "value" - - These restrictions can be conjoined with AND, OR, and NOT - conjunctions. - order_by (str): - Optional. Orders the result by ``name`` or ``create_time`` - fields. If not specified, the ordering is undefined. 
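# Illustrative sketch only: paging through EntryTypes with the documented
# filter syntax. The pager follows next_page_token transparently;
# "example-project" and the label key are placeholders.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
request = dataplex_v1.ListEntryTypesRequest(
    parent="projects/example-project/locations/us-central1",
    filter='labels.env = "prod" AND NOT name = "excluded-type"',
    order_by="name",
    page_size=100,
)
for entry_type in client.list_entry_types(request=request):
    print(entry_type.name)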
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEntryTypesResponse(proto.Message): - r"""List EntryTypes response. - - Attributes: - entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]): - EntryTypes under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - entry_types: MutableSequence['EntryType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EntryType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetEntryTypeRequest(proto.Message): - r"""Get EntryType request. - - Attributes: - name (str): - Required. The resource name of the EntryType: - ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateAspectTypeRequest(proto.Message): - r"""Create AspectType Request. - - Attributes: - parent (str): - Required. The resource name of the AspectType, of the form: - projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a Google Cloud region. - aspect_type_id (str): - Required. AspectType identifier. - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - aspect_type_id: str = proto.Field( - proto.STRING, - number=2, - ) - aspect_type: 'AspectType' = proto.Field( - proto.MESSAGE, - number=3, - message='AspectType', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateAspectTypeRequest(proto.Message): - r"""Update AspectType Request - - Attributes: - aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - aspect_type: 'AspectType' = proto.Field( - proto.MESSAGE, - number=1, - message='AspectType', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteAspectTypeRequest(proto.Message): - r"""Delete AspectType Request. - - Attributes: - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteAspectTypeRequest method returns an - ABORTED error response. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListAspectTypesRequest(proto.Message): - r"""List AspectTypes request. - - Attributes: - parent (str): - Required. The resource name of the AspectType location, of - the form: - ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of AspectTypes to - return. The service may return fewer than this - value. If unspecified, the service returns at - most 10 AspectTypes. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListAspectTypes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters you - provide to ``ListAspectTypes`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - service supports the following formats: - - - labels.key1 = "value1" - - labels:key1 - - name = "value" - - These restrictions can be conjoined with AND, OR, and NOT - conjunctions. - order_by (str): - Optional. Orders the result by ``name`` or ``create_time`` - fields. If not specified, the ordering is undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListAspectTypesResponse(proto.Message): - r"""List AspectTypes response. - - Attributes: - aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]): - AspectTypes under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - aspect_types: MutableSequence['AspectType'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AspectType', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetAspectTypeRequest(proto.Message): - r"""Get AspectType request. - - Attributes: - name (str): - Required. The resource name of the AspectType: - ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateEntryRequest(proto.Message): - r"""Create Entry request. - - Attributes: - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - entry_id (str): - Required. Entry identifier. It has to be unique within an - Entry Group. - - Entries corresponding to Google Cloud resources use an Entry - ID format based on `full resource - names `__. - The format is a full resource name of the resource without - the prefix double slashes in the API service name part of - the full resource name. This allows retrieval of entries - using their associated resource name. 
- - For example, if the full resource name of a resource is - ``//library.googleapis.com/shelves/shelf1/books/book2``, - then the suggested entry_id is - ``library.googleapis.com/shelves/shelf1/books/book2``. - - It is also suggested to follow the same convention for - entries corresponding to resources from providers or systems - other than Google Cloud. - - The maximum size of the field is 4000 characters. - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=3, - message='Entry', - ) - - -class UpdateEntryRequest(proto.Message): - r"""Update Entry request. - - Attributes: - entry (google.cloud.dataplex_v1.types.Entry): - Required. Entry resource. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. To update Aspects, the - update_mask must contain the value "aspects". - - If the update_mask is empty, the service will update all - modifiable fields present in the request. - allow_missing (bool): - Optional. If set to true and the entry - doesn't exist, the service will create it. - delete_missing_aspects (bool): - Optional. If set to true and the aspect_keys specify aspect - ranges, the service deletes any existing aspects from that - range that weren't provided in the request. - aspect_keys (MutableSequence[str]): - Optional. The map keys of the Aspects which the service - should modify. It supports the following syntaxes: - - - ``{aspect_type_reference}`` - matches an aspect of the - given type and empty path. - - ``{aspect_type_reference}@path`` - matches an aspect of - the given type and specified path. For example, to attach - an aspect to a field that is specified by the ``schema`` - aspect, the path should have the format - ``Schema.{field_name}``. - - ``{aspect_type_reference}@*`` - matches aspects of the - given type for all paths. - - ``*@path`` - matches aspects of all types on the given - path. - - Replace ``{aspect_type_reference}`` with a reference to the - aspect type, in the format - ``{project_id_or_number}.{location_id}.{aspect_type_id}``. - - The service will not remove existing aspects matching the - syntax unless ``delete_missing_aspects`` is set to true. - - If this field is left empty, the service treats it as - specifying exactly those Aspects present in the request. - """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - delete_missing_aspects: bool = proto.Field( - proto.BOOL, - number=4, - ) - aspect_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class DeleteEntryRequest(proto.Message): - r"""Delete Entry request. - - Attributes: - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListEntriesRequest(proto.Message): - r"""List Entries request. - - Attributes: - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. - page_size (int): - Optional. Number of items to return per page. If there are - remaining results, the service returns a next_page_token. If - unspecified, the service returns at most 10 Entries. The - maximum value is 100; values above 100 will be coerced to - 100. - page_token (str): - Optional. 
Page token received from a previous - ``ListEntries`` call. Provide this to retrieve the - subsequent page. - filter (str): - Optional. A filter on the entries to return. Filters are - case-sensitive. You can filter the request by the following - fields: - - - entry_type - - entry_source.display_name - - The comparison operators are =, !=, <, >, <=, >=. The - service compares strings according to lexical order. - - You can use the logical operators AND, OR, NOT in the - filter. - - You can use Wildcard "\*", but for entry_type you need to - provide the full project id or number. - - Example filter expressions: - - - "entry_source.display_name=AnExampleDisplayName" - - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - - "entry_type=projects/example-project/locations/us/entryTypes/a\* - OR entry_type=projects/another-project/locations/\*" - - "NOT entry_source.display_name=AnotherExampleDisplayName". - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListEntriesResponse(proto.Message): - r"""List Entries response. - - Attributes: - entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]): - The list of entries under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - entries: MutableSequence['Entry'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entry', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetEntryRequest(proto.Message): - r"""Get Entry request. - - Attributes: - name (str): - Required. The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - view (google.cloud.dataplex_v1.types.EntryView): - Optional. View to control which parts of an - entry the service should return. - aspect_types (MutableSequence[str]): - Optional. Limits the aspects returned to the - provided aspect types. It only works for CUSTOM - view. - paths (MutableSequence[str]): - Optional. Limits the aspects returned to - those associated with the provided paths within - the Entry. It only works for CUSTOM view. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'EntryView' = proto.Field( - proto.ENUM, - number=2, - enum='EntryView', - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class LookupEntryRequest(proto.Message): - r"""Lookup Entry request using permissions in the source system. - - Attributes: - name (str): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/{location}``. - view (google.cloud.dataplex_v1.types.EntryView): - Optional. View to control which parts of an - entry the service should return. - aspect_types (MutableSequence[str]): - Optional. Limits the aspects returned to the - provided aspect types. It only works for CUSTOM - view. - paths (MutableSequence[str]): - Optional. Limits the aspects returned to - those associated with the provided paths within - the Entry. It only works for CUSTOM view. - entry (str): - Required. 
The resource name of the Entry: - ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'EntryView' = proto.Field( - proto.ENUM, - number=2, - enum='EntryView', - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - entry: str = proto.Field( - proto.STRING, - number=5, - ) - - -class SearchEntriesRequest(proto.Message): - r""" - - Attributes: - name (str): - Required. The project to which the request should be - attributed in the following form: - ``projects/{project}/locations/global``. - query (str): - Required. The query against which entries in scope should be - matched. The query syntax is defined in `Search syntax for - Dataplex Universal - Catalog `__. - page_size (int): - Optional. Number of results in the search page. If <=0, then - defaults to 10. Max limit for page_size is 1000. Throws an - invalid argument for page_size > 1000. - page_token (str): - Optional. Page token received from a previous - ``SearchEntries`` call. Provide this to retrieve the - subsequent page. - order_by (str): - Optional. Specifies the ordering of results. Supported - values are: - - - ``relevance`` - - ``last_modified_timestamp`` - - ``last_modified_timestamp asc`` - scope (str): - Optional. The scope under which the search should be - operating. It must either be ``organizations/{organization_id}`` or - ``projects/{project_id_or_number}``. If it is unspecified, it - defaults to the organization where the project provided in - ``name`` is located. - semantic_search (bool): - Optional. Specifies whether the search should - understand the meaning and intent behind the - query, rather than just matching keywords. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - scope: str = proto.Field( - proto.STRING, - number=7, - ) - semantic_search: bool = proto.Field( - proto.BOOL, - number=11, - ) - - -class SearchEntriesResult(proto.Message): - r"""A single result of a SearchEntries request. - - Attributes: - linked_resource (str): - Linked resource name. - dataplex_entry (google.cloud.dataplex_v1.types.Entry): - - snippets (google.cloud.dataplex_v1.types.SearchEntriesResult.Snippets): - Snippets. - """ - - class Snippets(proto.Message): - r"""Snippets for the entry. Contains HTML-style highlighting for - matched tokens and is used in the UI. - - Attributes: - dataplex_entry (google.cloud.dataplex_v1.types.Entry): - Entry - """ - - dataplex_entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - - linked_resource: str = proto.Field( - proto.STRING, - number=8, - ) - dataplex_entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=9, - message='Entry', - ) - snippets: Snippets = proto.Field( - proto.MESSAGE, - number=12, - message=Snippets, - ) - - -class SearchEntriesResponse(proto.Message): - r""" - - Attributes: - results (MutableSequence[google.cloud.dataplex_v1.types.SearchEntriesResult]): - The results matching the search query. - total_size (int): - The estimated total number of matching - entries. This number isn't guaranteed to be - accurate. 
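# Illustrative sketch only: a catalog search attributed to a project. The query
# string and project ID are placeholders; the pager yields SearchEntriesResult
# messages with the matched entry and its linked resource.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
results = client.search_entries(
    request=dataplex_v1.SearchEntriesRequest(
        name="projects/example-project/locations/global",
        query="orders",
        order_by="relevance",
        semantic_search=True,  # interpret intent rather than only match keywords
    )
)
for result in results:
    print(result.linked_resource, result.dataplex_entry.name)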
- next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that the service couldn't reach. - Search results don't include data from these - locations. - """ - - @property - def raw_page(self): - return self - - results: MutableSequence['SearchEntriesResult'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SearchEntriesResult', - ) - total_size: int = proto.Field( - proto.INT32, - number=2, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class ImportItem(proto.Message): - r"""An object that describes the values that you want to set for an - entry and its attached aspects when you import metadata. Used when - you run a metadata import job. See - [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. - - You provide a collection of import items in a metadata import file. - For more information about how to create a metadata import file, see - `Metadata import - file `__. - - Attributes: - entry (google.cloud.dataplex_v1.types.Entry): - Information about an entry and its attached - aspects. - entry_link (google.cloud.dataplex_v1.types.EntryLink): - Information about the entry link. User should provide either - one of the entry or entry_link. While providing entry_link, - user should not provide update_mask and aspect_keys. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update, in paths that are relative to the - ``Entry`` resource. Separate each field with a comma. - - In ``FULL`` entry sync mode, Dataplex Universal Catalog - includes the paths of all of the fields for an entry that - can be modified, including aspects. This means that Dataplex - Universal Catalog replaces the existing entry with the entry - in the metadata import file. All modifiable fields are - updated, regardless of the fields that are listed in the - update mask, and regardless of whether a field is present in - the ``entry`` object. - - The ``update_mask`` field is ignored when an entry is - created or re-created. - - In an aspect-only metadata job (when entry sync mode is - ``NONE``), set this value to ``aspects``. - - Dataplex Universal Catalog also determines which entries and - aspects to modify by comparing the values and timestamps - that you provide in the metadata import file with the values - and timestamps that exist in your project. For more - information, see `Comparison - logic `__. - aspect_keys (MutableSequence[str]): - The aspects to modify. Supports the following syntaxes: - - - ``{aspect_type_reference}``: matches aspects that belong - to the specified aspect type and are attached directly to - the entry. - - ``{aspect_type_reference}@{path}``: matches aspects that - belong to the specified aspect type and path. - - ``{aspect_type_reference}@*`` : matches aspects of the - given type for all paths. - - ``*@path`` : matches aspects of all types on the given - path. - - Replace ``{aspect_type_reference}`` with a reference to the - aspect type, in the format - ``{project_id_or_number}.{location_id}.{aspect_type_id}``. - - In ``FULL`` entry sync mode, if you leave this field empty, - it is treated as specifying exactly those aspects that are - present within the specified entry. Dataplex Universal - Catalog implicitly adds the keys for all of the required - aspects of an entry. 
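# Illustrative sketch only: an ImportItem for an aspect-only update. Per the
# docstring above, update_mask is set to "aspects" and aspect_keys names the
# aspects to modify; all identifiers are placeholders.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

item = dataplex_v1.ImportItem(
    entry=dataplex_v1.Entry(
        name="projects/example-project/locations/us-central1/entryGroups/example-group/entries/example-entry",
        entry_type="projects/example-project/locations/global/entryTypes/example-type",
        aspects={
            "example-project.global.example-aspect-type": dataplex_v1.Aspect(
                data={"owner": "data-team"},
            ),
        },
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["aspects"]),
    aspect_keys=["example-project.global.example-aspect-type"],
)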
- """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - entry_link: 'EntryLink' = proto.Field( - proto.MESSAGE, - number=4, - message='EntryLink', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - aspect_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CreateMetadataJobRequest(proto.Message): - r"""Create metadata job request. - - Attributes: - parent (str): - Required. The resource name of the parent location, in the - format - ``projects/{project_id_or_number}/locations/{location_id}`` - metadata_job (google.cloud.dataplex_v1.types.MetadataJob): - Required. The metadata job resource. - metadata_job_id (str): - Optional. The metadata job ID. If not provided, a unique ID - is generated with the prefix ``metadata-job-``. - validate_only (bool): - Optional. The service validates the request - without performing any mutations. The default is - false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - metadata_job: 'MetadataJob' = proto.Field( - proto.MESSAGE, - number=2, - message='MetadataJob', - ) - metadata_job_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class GetMetadataJobRequest(proto.Message): - r"""Get metadata job request. - - Attributes: - name (str): - Required. The resource name of the metadata job, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListMetadataJobsRequest(proto.Message): - r"""List metadata jobs request. - - Attributes: - parent (str): - Required. The resource name of the parent location, in the - format - ``projects/{project_id_or_number}/locations/{location_id}`` - page_size (int): - Optional. The maximum number of metadata jobs - to return. The service might return fewer jobs - than this value. If unspecified, at most 10 jobs - are returned. The maximum value is 1,000. - page_token (str): - Optional. The page token received from a previous - ``ListMetadataJobs`` call. Provide this token to retrieve - the subsequent page of results. When paginating, all other - parameters that are provided to the ``ListMetadataJobs`` - request must match the call that provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - service supports the following formats: - - - ``labels.key1 = "value1"`` - - ``labels:key1`` - - ``name = "value"`` - - You can combine filters with ``AND``, ``OR``, and ``NOT`` - operators. - order_by (str): - Optional. The field to sort the results by, either ``name`` - or ``create_time``. If not specified, the ordering is - undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListMetadataJobsResponse(proto.Message): - r"""List metadata jobs response. - - Attributes: - metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): - Metadata jobs under the specified parent - location. - next_page_token (str): - A token to retrieve the next page of results. 
- If there are no more results in the list, the - value is empty. - unreachable_locations (MutableSequence[str]): - Locations that the service couldn't reach. - """ - - @property - def raw_page(self): - return self - - metadata_jobs: MutableSequence['MetadataJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='MetadataJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CancelMetadataJobRequest(proto.Message): - r"""Cancel metadata job request. - - Attributes: - name (str): - Required. The resource name of the job, in the format - ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class MetadataJob(proto.Message): - r"""A metadata job resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. The name of the resource that the - configuration is applied to, in the format - ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. - uid (str): - Output only. A system-generated, globally - unique ID for the metadata job. If the metadata - job is deleted and then re-created with the same - name, this ID is different. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the metadata job - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the metadata job - was updated. - labels (MutableMapping[str, str]): - Optional. User-defined labels. - type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): - Required. Metadata job type. - import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): - Import job specification. - - This field is a member of `oneof`_ ``spec``. - export_spec (google.cloud.dataplex_v1.types.MetadataJob.ExportJobSpec): - Export job specification. - - This field is a member of `oneof`_ ``spec``. - import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): - Output only. Import job result. - - This field is a member of `oneof`_ ``result``. - export_result (google.cloud.dataplex_v1.types.MetadataJob.ExportJobResult): - Output only. Export job result. - - This field is a member of `oneof`_ ``result``. - status (google.cloud.dataplex_v1.types.MetadataJob.Status): - Output only. Metadata job status. - """ - class Type(proto.Enum): - r"""Metadata job type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - IMPORT (1): - Import job. - EXPORT (2): - Export job. - """ - TYPE_UNSPECIFIED = 0 - IMPORT = 1 - EXPORT = 2 - - class ImportJobResult(proto.Message): - r"""Results from a metadata import job. - - Attributes: - deleted_entries (int): - Output only. The total number of entries that - were deleted. - updated_entries (int): - Output only. The total number of entries that - were updated. - created_entries (int): - Output only. The total number of entries that - were created. - unchanged_entries (int): - Output only. The total number of entries that - were unchanged. - recreated_entries (int): - Output only. The total number of entries that - were recreated. 
- update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the status was - updated. - deleted_entry_links (int): - Output only. The total number of entry links - that were successfully deleted. - created_entry_links (int): - Output only. The total number of entry links - that were successfully created. - unchanged_entry_links (int): - Output only. The total number of entry links - that were left unchanged. - """ - - deleted_entries: int = proto.Field( - proto.INT64, - number=1, - ) - updated_entries: int = proto.Field( - proto.INT64, - number=2, - ) - created_entries: int = proto.Field( - proto.INT64, - number=3, - ) - unchanged_entries: int = proto.Field( - proto.INT64, - number=4, - ) - recreated_entries: int = proto.Field( - proto.INT64, - number=6, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deleted_entry_links: int = proto.Field( - proto.INT64, - number=7, - ) - created_entry_links: int = proto.Field( - proto.INT64, - number=8, - ) - unchanged_entry_links: int = proto.Field( - proto.INT64, - number=9, - ) - - class ExportJobResult(proto.Message): - r"""Summary results from a metadata export job. The results are a - snapshot of the metadata at the time when the job was created. - The exported entries are saved to a Cloud Storage bucket. - - Attributes: - exported_entries (int): - Output only. The number of entries that were - exported. - error_message (str): - Output only. The error message if the - metadata export job failed. - """ - - exported_entries: int = proto.Field( - proto.INT64, - number=1, - ) - error_message: str = proto.Field( - proto.STRING, - number=2, - ) - - class ImportJobSpec(proto.Message): - r"""Job specification for a metadata import job. - - You can run the following kinds of metadata import jobs: - - - Full sync of entries with incremental import of their aspects. - Supported for custom entries. - - Incremental import of aspects only. Supported for aspects that - belong to custom entries and system entries. For custom entries, - you can modify both optional aspects and required aspects. For - system entries, you can modify optional aspects. - - Attributes: - source_storage_uri (str): - Optional. The URI of a Cloud Storage bucket or folder - (beginning with ``gs://`` and ending with ``/``) that - contains the metadata import files for this job. - - A metadata import file defines the values to set for each of - the entries and aspects in a metadata import job. For more - information about how to create a metadata import file and - the file requirements, see `Metadata import - file `__. - - You can provide multiple metadata import files in the same - metadata job. The bucket or folder must contain at least one - metadata import file, in JSON Lines format (either ``.json`` - or ``.jsonl`` file extension). - - In ``FULL`` entry sync mode, don't save the metadata import - file in a folder named ``SOURCE_STORAGE_URI/deletions/``. - - **Caution**: If the metadata import file contains no data, - all entries and aspects that belong to the job's scope are - deleted. - source_create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the process that - created the metadata import files began. - scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): - Required. A boundary on the scope of impact - that the metadata import job can have. 
- entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): - Required. The sync mode for entries. - aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): - Required. The sync mode for aspects. - log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): - Optional. The level of logs to write to Cloud Logging for - this job. - - Debug-level logs provide highly-detailed information for - troubleshooting, but their increased verbosity could incur - `additional - costs `__ that - might not be merited for all jobs. - - If unspecified, defaults to ``INFO``. - """ - class SyncMode(proto.Enum): - r"""Specifies how the entries and aspects in a metadata import job are - updated. For more information, see `Sync - mode `__. - - Values: - SYNC_MODE_UNSPECIFIED (0): - Sync mode unspecified. - FULL (1): - All resources in the job's scope are - modified. If a resource exists in Dataplex - Universal Catalog but isn't included in the - metadata import file, the resource is deleted - when you run the metadata job. Use this mode to - perform a full sync of the set of entries in the - job scope. - - This sync mode is supported for entries. - INCREMENTAL (2): - Only the resources that are explicitly - included in the metadata import file are - modified. Use this mode to modify a subset of - resources while leaving unreferenced resources - unchanged. - - This sync mode is supported for aspects. - NONE (3): - If entry sync mode is ``NONE``, then aspects are modified - according to the aspect sync mode. Other metadata that - belongs to entries in the job's scope isn't modified. - - This sync mode is supported for entries. - """ - SYNC_MODE_UNSPECIFIED = 0 - FULL = 1 - INCREMENTAL = 2 - NONE = 3 - - class LogLevel(proto.Enum): - r"""The level of logs to write to Cloud Logging for this job. - - Values: - LOG_LEVEL_UNSPECIFIED (0): - Log level unspecified. - DEBUG (1): - Debug-level logging. Captures detailed logs for each import - item. Use debug-level logging to troubleshoot issues with - specific import items. For example, use debug-level logging - to identify resources that are missing from the job scope, - entries or aspects that don't conform to the associated - entry type or aspect type, or other misconfigurations with - the metadata import file. - - Depending on the size of your metadata job and the number of - logs that are generated, debug-level logging might incur - `additional - costs `__. - INFO (2): - Info-level logging. Captures logs at the - overall job level. Includes aggregate logs about - import items, but doesn't specify which import - item has an error. - """ - LOG_LEVEL_UNSPECIFIED = 0 - DEBUG = 1 - INFO = 2 - - class ImportJobScope(proto.Message): - r"""A boundary on the scope of impact that the metadata import - job can have. - - Attributes: - entry_groups (MutableSequence[str]): - Required. The entry group that is in scope for the import - job, specified as a relative resource name in the format - ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. - Only entries and aspects that belong to the specified entry - group are affected by the job. - - Must contain exactly one element. The entry group and the - job must be in the same location. - entry_types (MutableSequence[str]): - Required. 
The entry types that are in scope for the import - job, specified as relative resource names in the format - ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. - The job modifies only the entries and aspects that belong to - these entry types. - - If the metadata import file attempts to modify an entry - whose type isn't included in this list, the import job is - halted before modifying any entries or aspects. - - The location of an entry type must either match the location - of the job, or the entry type must be global. - aspect_types (MutableSequence[str]): - Optional. The aspect types that are in scope for the import - job, specified as relative resource names in the format - ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. - The job modifies only the aspects that belong to these - aspect types. - - This field is required when creating an aspect-only import - job. - - If the metadata import file attempts to modify an aspect - whose type isn't included in this list, the import job is - halted before modifying any entries or aspects. - - The location of an aspect type must either match the - location of the job, or the aspect type must be global. - glossaries (MutableSequence[str]): - Optional. The glossaries that are in scope for the import - job, specified as relative resource names in the format - ``projects/{project_number_or_id}/locations/{location_id}/glossaries/{glossary_id}``. - - While importing Business Glossary entries, the user must - provide glossaries. While importing entries, the user does - not have to provide glossaries. If the metadata import file - attempts to modify Business Glossary entries whose glossary - isn't included in this list, the import job will skip those - entries. - - The location of a glossary must either match the location of - the job, or the glossary must be global. - entry_link_types (MutableSequence[str]): - Optional. The entry link types that are in scope for the - import job, specified as relative resource names in the - format - ``projects/{project_number_or_id}/locations/{location_id}/entryLinkTypes/{entry_link_type_id}``. - The job modifies only the entryLinks that belong to these - entry link types. - - If the metadata import file attempts to create or delete an - entry link whose entry link type isn't included in this - list, the import job will skip those entry links. - referenced_entry_scopes (MutableSequence[str]): - Optional. Defines the scope of entries that can be - referenced in the entry links. - - Currently, projects are supported as valid scopes. Format: - ``projects/{project_number_or_id}`` - - If the metadata import file attempts to create an entry link - which references an entry that is not in the scope, the - import job will skip that entry link. 
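# Illustrative sketch only: creating a metadata import job scoped to a single
# entry group. The bucket, project, and type names are placeholders; the call
# returns a long-running operation.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
job = dataplex_v1.MetadataJob(
    type_=dataplex_v1.MetadataJob.Type.IMPORT,
    import_spec=dataplex_v1.MetadataJob.ImportJobSpec(
        source_storage_uri="gs://example-bucket/import/",
        entry_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.FULL,
        aspect_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.INCREMENTAL,
        scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
            entry_groups=["projects/example-project/locations/us-central1/entryGroups/example-group"],
            entry_types=["projects/example-project/locations/global/entryTypes/example-type"],
        ),
    ),
)
operation = client.create_metadata_job(
    parent="projects/example-project/locations/us-central1",
    metadata_job=job,
)
print(operation.result().name)  # blocks until the operation completes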
- """ - - entry_groups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - entry_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - glossaries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - entry_link_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - referenced_entry_scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - source_storage_uri: str = proto.Field( - proto.STRING, - number=1, - ) - source_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - scope: 'MetadataJob.ImportJobSpec.ImportJobScope' = proto.Field( - proto.MESSAGE, - number=2, - message='MetadataJob.ImportJobSpec.ImportJobScope', - ) - entry_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( - proto.ENUM, - number=3, - enum='MetadataJob.ImportJobSpec.SyncMode', - ) - aspect_sync_mode: 'MetadataJob.ImportJobSpec.SyncMode' = proto.Field( - proto.ENUM, - number=4, - enum='MetadataJob.ImportJobSpec.SyncMode', - ) - log_level: 'MetadataJob.ImportJobSpec.LogLevel' = proto.Field( - proto.ENUM, - number=6, - enum='MetadataJob.ImportJobSpec.LogLevel', - ) - - class ExportJobSpec(proto.Message): - r"""Job specification for a metadata export job. - - Attributes: - scope (google.cloud.dataplex_v1.types.MetadataJob.ExportJobSpec.ExportJobScope): - Required. The scope of the export job. - output_path (str): - Required. The root path of the Cloud Storage bucket to - export the metadata to, in the format ``gs://{bucket}/``. - You can optionally specify a custom prefix after the bucket - name, in the format ``gs://{bucket}/{prefix}/``. The maximum - length of the custom prefix is 128 characters. Dataplex - Universal Catalog constructs the object path for the - exported files by using the bucket name and prefix that you - provide, followed by a system-generated path. - - The bucket must be in the same VPC Service Controls - perimeter as the job. - """ - - class ExportJobScope(proto.Message): - r"""The scope of the export job. - - Attributes: - organization_level (bool): - Whether the metadata export job is an organization-level - export job. - - - If ``true``, the job exports the entries from the same - organization and VPC Service Controls perimeter as the - job. The project that the job belongs to determines the - VPC Service Controls perimeter. If you set the job scope - to be at the organization level, then don't provide a list - of projects or entry groups. - - If ``false``, you must specify a list of projects or a - list of entry groups whose entries you want to export. - - The default is ``false``. - projects (MutableSequence[str]): - The projects whose metadata you want to export, in the - format ``projects/{project_id_or_number}``. Only the entries - from the specified projects are exported. - - The projects must be in the same organization and VPC - Service Controls perimeter as the job. - - If you set the job scope to be a list of projects, then set - the organization-level export flag to false and don't - provide a list of entry groups. - entry_groups (MutableSequence[str]): - The entry groups whose metadata you want to export, in the - format - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - Only the entries in the specified entry groups are exported. 
- - The entry groups must be in the same location and the same - VPC Service Controls perimeter as the job. - - If you set the job scope to be a list of entry groups, then - set the organization-level export flag to false and don't - provide a list of projects. - entry_types (MutableSequence[str]): - The entry types that are in scope for the export job, - specified as relative resource names in the format - ``projects/{project_id_or_number}/locations/{location}/entryTypes/{entry_type_id}``. - Only entries that belong to the specified entry types are - affected by the job. - aspect_types (MutableSequence[str]): - The aspect types that are in scope for the export job, - specified as relative resource names in the format - ``projects/{project_id_or_number}/locations/{location}/aspectTypes/{aspect_type_id}``. - Only aspects that belong to the specified aspect types are - affected by the job. - """ - - organization_level: bool = proto.Field( - proto.BOOL, - number=1, - ) - projects: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - entry_groups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - entry_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - aspect_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - scope: 'MetadataJob.ExportJobSpec.ExportJobScope' = proto.Field( - proto.MESSAGE, - number=2, - message='MetadataJob.ExportJobSpec.ExportJobScope', - ) - output_path: str = proto.Field( - proto.STRING, - number=3, - ) - - class Status(proto.Message): - r"""Metadata job status. - - Attributes: - state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): - Output only. State of the metadata job. - message (str): - Output only. Message relating to the - progression of a metadata job. - completion_percent (int): - Output only. Progress tracking. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the status was - updated. - """ - class State(proto.Enum): - r"""State of a metadata job. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - QUEUED (1): - The job is queued. - RUNNING (2): - The job is running. - CANCELING (3): - The job is being canceled. - CANCELED (4): - The job is canceled. - SUCCEEDED (5): - The job succeeded. - FAILED (6): - The job failed. - SUCCEEDED_WITH_ERRORS (7): - The job completed with some errors. 
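# Illustrative sketch only: polling a metadata job out-of-band. The job name is
# a placeholder; status.state takes one of the State values listed above.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
job = client.get_metadata_job(
    name="projects/example-project/locations/us-central1/metadataJobs/example-job"
)
print(job.status.state, f"{job.status.completion_percent}%", job.status.message)
if job.status.state == dataplex_v1.MetadataJob.Status.State.SUCCEEDED_WITH_ERRORS:
    print("Check Cloud Logging for per-item errors (see import_spec.log_level).")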
- """ - STATE_UNSPECIFIED = 0 - QUEUED = 1 - RUNNING = 2 - CANCELING = 3 - CANCELED = 4 - SUCCEEDED = 5 - FAILED = 6 - SUCCEEDED_WITH_ERRORS = 7 - - state: 'MetadataJob.Status.State' = proto.Field( - proto.ENUM, - number=1, - enum='MetadataJob.Status.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - completion_percent: int = proto.Field( - proto.INT32, - number=3, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - import_spec: ImportJobSpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=ImportJobSpec, - ) - export_spec: ExportJobSpec = proto.Field( - proto.MESSAGE, - number=101, - oneof='spec', - message=ExportJobSpec, - ) - import_result: ImportJobResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=ImportJobResult, - ) - export_result: ExportJobResult = proto.Field( - proto.MESSAGE, - number=201, - oneof='result', - message=ExportJobResult, - ) - status: Status = proto.Field( - proto.MESSAGE, - number=7, - message=Status, - ) - - -class EntryLink(proto.Message): - r"""EntryLink represents a link between two Entries. - - Attributes: - name (str): - Output only. Immutable. Identifier. The relative resource - name of the Entry Link, of the form: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}`` - entry_link_type (str): - Required. Immutable. Relative resource name of the Entry - Link Type used to create this Entry Link. For example: - - - Entry link between synonym terms in a glossary: - ``projects/dataplex-types/locations/global/entryLinkTypes/synonym`` - - Entry link between related terms in a glossary: - ``projects/dataplex-types/locations/global/entryLinkTypes/related`` - - Entry link between glossary terms and data assets: - ``projects/dataplex-types/locations/global/entryLinkTypes/definition`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry Link was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry Link was - last updated. - entry_references (MutableSequence[google.cloud.dataplex_v1.types.EntryLink.EntryReference]): - Required. Specifies the Entries referenced in - the Entry Link. There should be exactly two - entry references. - """ - - class EntryReference(proto.Message): - r"""Reference to the Entry that is linked through the Entry Link. - - Attributes: - name (str): - Required. Immutable. The relative resource name of the - referenced Entry, of the form: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` - path (str): - Immutable. The path in the Entry that is - referenced in the Entry Link. Empty path denotes - that the Entry itself is referenced in the Entry - Link. - type_ (google.cloud.dataplex_v1.types.EntryLink.EntryReference.Type): - Required. Immutable. 
The reference type of - the Entry. - """ - class Type(proto.Enum): - r"""Reference type of the Entry. - - Values: - UNSPECIFIED (0): - Unspecified reference type. Implies that the - Entry is referenced in a non-directional Entry - Link. - SOURCE (2): - The Entry is referenced as the source of the - directional Entry Link. - TARGET (3): - The Entry is referenced as the target of the - directional Entry Link. - """ - UNSPECIFIED = 0 - SOURCE = 2 - TARGET = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - type_: 'EntryLink.EntryReference.Type' = proto.Field( - proto.ENUM, - number=3, - enum='EntryLink.EntryReference.Type', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - entry_link_type: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - entry_references: MutableSequence[EntryReference] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=EntryReference, - ) - - -class CreateEntryLinkRequest(proto.Message): - r"""Request message for CreateEntryLink. - - Attributes: - parent (str): - Required. The resource name of the parent Entry Group: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. - entry_link_id (str): - Required. Entry Link identifier - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the EntryGroup. - entry_link (google.cloud.dataplex_v1.types.EntryLink): - Required. Entry Link resource. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_link_id: str = proto.Field( - proto.STRING, - number=2, - ) - entry_link: 'EntryLink' = proto.Field( - proto.MESSAGE, - number=3, - message='EntryLink', - ) - - -class DeleteEntryLinkRequest(proto.Message): - r"""Request message for DeleteEntryLink. - - Attributes: - name (str): - Required. The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetEntryLinkRequest(proto.Message): - r"""Request message for GetEntryLink. - - Attributes: - name (str): - Required. The resource name of the Entry Link: - ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py deleted file mode 100644 index ceab2bf749ad..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/cmek.py +++ /dev/null @@ -1,356 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
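# --- Illustrative sketch (editor's note, not part of the generated file) ---
# A minimal example of the EntryLink types defined above, linking two
# glossary-term entries with the `synonym` entry link type from the
# `entry_link_type` docstring. Synonym links are non-directional, so both
# references use Type.UNSPECIFIED. Project and entry names are placeholders.
from google.cloud import dataplex_v1

Reference = dataplex_v1.EntryLink.EntryReference
request = dataplex_v1.CreateEntryLinkRequest(
    parent="projects/my-project/locations/global/entryGroups/my-group",
    entry_link_id="my-synonym-link",
    entry_link=dataplex_v1.EntryLink(
        entry_link_type="projects/dataplex-types/locations/global/entryLinkTypes/synonym",
        entry_references=[
            Reference(
                name="projects/my-project/locations/global/entryGroups/my-group/entries/term-a",
                type_=Reference.Type.UNSPECIFIED,
            ),
            Reference(
                name="projects/my-project/locations/global/entryGroups/my-group/entries/term-b",
                type_=Reference.Type.UNSPECIFIED,
            ),
        ],
    ),
)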
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'EncryptionConfig', - 'CreateEncryptionConfigRequest', - 'GetEncryptionConfigRequest', - 'UpdateEncryptionConfigRequest', - 'DeleteEncryptionConfigRequest', - 'ListEncryptionConfigsRequest', - 'ListEncryptionConfigsResponse', - }, -) - - -class EncryptionConfig(proto.Message): - r"""A Resource designed to manage encryption configurations for - customers to support Customer Managed Encryption Keys (CMEK). - - Attributes: - name (str): - Identifier. The resource name of the EncryptionConfig. - Format: - organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config} - Global location is not supported. - key (str): - Optional. If a key is chosen, it means that - the customer is using CMEK. If a key is not - chosen, it means that the customer is using - Google managed encryption. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Encryption - configuration was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Encryption - configuration was last updated. - encryption_state (google.cloud.dataplex_v1.types.EncryptionConfig.EncryptionState): - Output only. The state of encryption of the - databases. - etag (str): - Etag of the EncryptionConfig. This is a - strong etag. - failure_details (google.cloud.dataplex_v1.types.EncryptionConfig.FailureDetails): - Output only. Details of the failure if - anything related to Cmek db fails. - """ - class EncryptionState(proto.Enum): - r"""State of encryption of the databases when EncryptionConfig is - created or updated. - - Values: - ENCRYPTION_STATE_UNSPECIFIED (0): - State is not specified. - ENCRYPTING (1): - The encryption state of the database when the - EncryptionConfig is created or updated. If the - encryption fails, it is retried indefinitely and - the state is shown as ENCRYPTING. - COMPLETED (2): - The encryption of data has completed - successfully. - FAILED (3): - The encryption of data has failed. - The state is set to FAILED when the encryption - fails due to reasons like permission issues, - invalid key etc. - """ - ENCRYPTION_STATE_UNSPECIFIED = 0 - ENCRYPTING = 1 - COMPLETED = 2 - FAILED = 3 - - class FailureDetails(proto.Message): - r"""Details of the failure if anything related to Cmek db fails. - - Attributes: - error_code (google.cloud.dataplex_v1.types.EncryptionConfig.FailureDetails.ErrorCode): - Output only. The error code for the failure. - error_message (str): - Output only. The error message will be shown to the user. - Set only if the error code is REQUIRE_USER_ACTION. - """ - class ErrorCode(proto.Enum): - r"""Error code for the failure if anything related to Cmek db - fails. 
- - Values: - UNKNOWN (0): - The error code is not specified - INTERNAL_ERROR (1): - Error because of internal server error, will - be retried automatically. - REQUIRE_USER_ACTION (2): - User action is required to resolve the error. - """ - UNKNOWN = 0 - INTERNAL_ERROR = 1 - REQUIRE_USER_ACTION = 2 - - error_code: 'EncryptionConfig.FailureDetails.ErrorCode' = proto.Field( - proto.ENUM, - number=1, - enum='EncryptionConfig.FailureDetails.ErrorCode', - ) - error_message: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - key: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - encryption_state: EncryptionState = proto.Field( - proto.ENUM, - number=5, - enum=EncryptionState, - ) - etag: str = proto.Field( - proto.STRING, - number=6, - ) - failure_details: FailureDetails = proto.Field( - proto.MESSAGE, - number=7, - message=FailureDetails, - ) - - -class CreateEncryptionConfigRequest(proto.Message): - r"""Create EncryptionConfig Request - - Attributes: - parent (str): - Required. The location at which the - EncryptionConfig is to be created. - encryption_config_id (str): - Required. The ID of the - [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] - to create. Currently, only a value of "default" is - supported. - encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): - Required. The EncryptionConfig to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - encryption_config_id: str = proto.Field( - proto.STRING, - number=2, - ) - encryption_config: 'EncryptionConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='EncryptionConfig', - ) - - -class GetEncryptionConfigRequest(proto.Message): - r"""Get EncryptionConfig Request - - Attributes: - name (str): - Required. The name of the EncryptionConfig to - fetch. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateEncryptionConfigRequest(proto.Message): - r"""Update EncryptionConfig Request - - Attributes: - encryption_config (google.cloud.dataplex_v1.types.EncryptionConfig): - Required. The EncryptionConfig to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. - The service treats an omitted field mask as an - implied field mask equivalent to all fields that - are populated (have a non-empty value). - """ - - encryption_config: 'EncryptionConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='EncryptionConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteEncryptionConfigRequest(proto.Message): - r"""Delete EncryptionConfig Request - - Attributes: - name (str): - Required. The name of the EncryptionConfig to - delete. - etag (str): - Optional. Etag of the EncryptionConfig. This - is a strong etag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEncryptionConfigsRequest(proto.Message): - r"""List EncryptionConfigs Request - - Attributes: - parent (str): - Required. The location for which the - EncryptionConfig is to be listed. - page_size (int): - Optional. Maximum number of EncryptionConfigs - to return. 
The service may return fewer than - this value. If unspecified, at most 10 - EncryptionConfigs will be returned. The maximum - value is 1000; values above 1000 will be coerced - to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEncryptionConfigs`` call. Provide this to retrieve the - subsequent page. When paginating, the parameters - filter - and order_by provided to ``ListEncryptionConfigs`` must - match the call that provided the page token. - filter (str): - Optional. Filter the EncryptionConfigs to be returned. Using - bare literals: (These values will be matched anywhere it may - appear in the object's field values) - - - filter=some_value Using fields: (These values will be - matched only in the specified field) - - filter=some_field=some_value Supported fields: - - name, key, create_time, update_time, encryption_state - Example: - - filter=name=organizations/123/locations/us-central1/encryptionConfigs/test-config - conjunctions: (AND, OR, NOT) - - filter=name=organizations/123/locations/us-central1/encryptionConfigs/test-config - AND mode=CMEK logical operators: (>, <, >=, <=, !=, =, :), - - filter=create_time>2024-05-01T00:00:00.000Z - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEncryptionConfigsResponse(proto.Message): - r"""List EncryptionConfigs Response - - Attributes: - encryption_configs (MutableSequence[google.cloud.dataplex_v1.types.EncryptionConfig]): - The list of EncryptionConfigs under the given - parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - encryption_configs: MutableSequence['EncryptionConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EncryptionConfig', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py deleted file mode 100644 index 4519a7b7e7ac..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/content.py +++ /dev/null @@ -1,227 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
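# --- Illustrative sketch (editor's note, not part of the generated file) ---
# Example request messages for the CMEK types above. The "default" config ID
# follows the CreateEncryptionConfigRequest docstring; the organization,
# location, and KMS key name are placeholders, and the filter uses the
# `encryption_state` field listed as supported in the docstring.
from google.cloud import dataplex_v1

create_req = dataplex_v1.CreateEncryptionConfigRequest(
    parent="organizations/123/locations/us-central1",
    encryption_config_id="default",  # currently the only supported ID
    encryption_config=dataplex_v1.EncryptionConfig(
        key="projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
    ),
)
list_req = dataplex_v1.ListEncryptionConfigsRequest(
    parent="organizations/123/locations/us-central1",
    page_size=10,
    filter="encryption_state=COMPLETED",
)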
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'CreateContentRequest', - 'UpdateContentRequest', - 'DeleteContentRequest', - 'ListContentRequest', - 'ListContentResponse', - 'GetContentRequest', - }, -) - - -class CreateContentRequest(proto.Message): - r"""Create content request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - content (google.cloud.dataplex_v1.types.Content): - Required. Content resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - content: analyze.Content = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Content, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateContentRequest(proto.Message): - r"""Update content request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - content (google.cloud.dataplex_v1.types.Content): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - content: analyze.Content = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Content, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteContentRequest(proto.Message): - r"""Delete content request. - - Attributes: - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListContentRequest(proto.Message): - r"""List content request. Returns the BASIC Content view. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - projects/{project_id}/locations/{location_id}/lakes/{lake_id} - page_size (int): - Optional. Maximum number of content to - return. The service may return fewer than this - value. If unspecified, at most 10 content will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListContent`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListContent`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. Filters are case-sensitive. The - following formats are supported: - - labels.key1 = "value1" labels:key1 type = "NOTEBOOK" type = - "SQL_SCRIPT" - - These restrictions can be coinjoined with AND, OR and NOT - conjunctions. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListContentResponse(proto.Message): - r"""List content response. 
- - Attributes: - content (MutableSequence[google.cloud.dataplex_v1.types.Content]): - Content under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - content: MutableSequence[analyze.Content] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Content, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetContentRequest(proto.Message): - r"""Get content request. - - Attributes: - name (str): - Required. The resource name of the content: - projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} - view (google.cloud.dataplex_v1.types.GetContentRequest.ContentView): - Optional. Specify content view to make a - partial request. - """ - class ContentView(proto.Enum): - r"""Specifies whether the request should return the full or the - partial representation. - - Values: - CONTENT_VIEW_UNSPECIFIED (0): - Content view not specified. Defaults to - BASIC. The API will default to the BASIC view. - BASIC (1): - Will not return the ``data_text`` field. - FULL (2): - Returns the complete proto. - """ - CONTENT_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: ContentView = proto.Field( - proto.ENUM, - number=2, - enum=ContentView, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py deleted file mode 100644 index a6d48d618a32..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_discovery.py +++ /dev/null @@ -1,364 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataDiscoverySpec', - 'DataDiscoveryResult', - }, -) - - -class DataDiscoverySpec(proto.Message): - r"""Spec for a data discovery scan. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bigquery_publishing_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig): - Optional. Configuration for metadata - publishing. - storage_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig): - Cloud Storage related configurations. - - This field is a member of `oneof`_ ``resource_config``. - """ - - class BigQueryPublishingConfig(proto.Message): - r"""Describes BigQuery publishing configurations. - - Attributes: - table_type (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig.TableType): - Optional. 
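# --- Illustrative sketch (editor's note, not part of the generated file) ---
# Example request messages for the content types above: a filtered list
# followed by a FULL-view get, which also returns the `data_text` field.
# Project, lake, and content names are placeholders.
from google.cloud import dataplex_v1

list_req = dataplex_v1.ListContentRequest(
    parent="projects/my-project/locations/us-central1/lakes/my-lake",
    filter='type = "SQL_SCRIPT"',
    page_size=10,
)
get_req = dataplex_v1.GetContentRequest(
    name="projects/my-project/locations/us-central1/lakes/my-lake/content/my-script",
    view=dataplex_v1.GetContentRequest.ContentView.FULL,
)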
Determines whether to publish - discovered tables as BigLake external tables or - non-BigLake external tables. - connection (str): - Optional. The BigQuery connection used to create BigLake - tables. Must be in the form - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - location (str): - Optional. The location of the BigQuery dataset to publish - BigLake external or non-BigLake external tables to. - - 1. If the Cloud Storage bucket is located in a multi-region - bucket, then BigQuery dataset can be in the same - multi-region bucket or any single region that is included - in the same multi-region bucket. The datascan can be - created in any single region that is included in the same - multi-region bucket - 2. If the Cloud Storage bucket is located in a dual-region - bucket, then BigQuery dataset can be located in regions - that are included in the dual-region bucket, or in a - multi-region that includes the dual-region. The datascan - can be created in any single region that is included in - the same dual-region bucket. - 3. If the Cloud Storage bucket is located in a single - region, then BigQuery dataset can be in the same single - region or any multi-region bucket that includes the same - single region. The datascan will be created in the same - single region as the bucket. - 4. If the BigQuery dataset is in single region, it must be - in the same single region as the datascan. - - For supported values, refer to - https://cloud.google.com/bigquery/docs/locations#supported_locations. - project (str): - Optional. The project of the BigQuery dataset to publish - BigLake external or non-BigLake external tables to. If not - specified, the project of the Cloud Storage bucket will be - used. The format is "projects/{project_id_or_number}". - """ - class TableType(proto.Enum): - r"""Determines how discovered tables are published. - - Values: - TABLE_TYPE_UNSPECIFIED (0): - Table type unspecified. - EXTERNAL (1): - Default. Discovered tables are published as - BigQuery external tables whose data is accessed - using the credentials of the user querying the - table. - BIGLAKE (2): - Discovered tables are published as BigLake - external tables whose data is accessed using the - credentials of the associated BigQuery - connection. - """ - TABLE_TYPE_UNSPECIFIED = 0 - EXTERNAL = 1 - BIGLAKE = 2 - - table_type: 'DataDiscoverySpec.BigQueryPublishingConfig.TableType' = proto.Field( - proto.ENUM, - number=2, - enum='DataDiscoverySpec.BigQueryPublishingConfig.TableType', - ) - connection: str = proto.Field( - proto.STRING, - number=3, - ) - location: str = proto.Field( - proto.STRING, - number=4, - ) - project: str = proto.Field( - proto.STRING, - number=5, - ) - - class StorageConfig(proto.Message): - r"""Configurations related to Cloud Storage as the data source. - - Attributes: - include_patterns (MutableSequence[str]): - Optional. Defines the data to include during - discovery when only a subset of the data should - be considered. Provide a list of patterns that - identify the data to include. For Cloud Storage - bucket assets, these patterns are interpreted as - glob patterns used to match object names. For - BigQuery dataset assets, these patterns are - interpreted as patterns to match table names. - exclude_patterns (MutableSequence[str]): - Optional. Defines the data to exclude during - discovery. Provide a list of patterns that - identify the data to exclude. 
For Cloud Storage - bucket assets, these patterns are interpreted as - glob patterns used to match object names. For - BigQuery dataset assets, these patterns are - interpreted as patterns to match table names. - csv_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.CsvOptions): - Optional. Configuration for CSV data. - json_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.JsonOptions): - Optional. Configuration for JSON data. - """ - - class CsvOptions(proto.Message): - r"""Describes CSV and similar semi-structured data formats. - - Attributes: - header_rows (int): - Optional. The number of rows to interpret as - header rows that should be skipped when reading - data rows. - delimiter (str): - Optional. The delimiter that is used to separate values. The - default is ``,`` (comma). - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - type_inference_disabled (bool): - Optional. Whether to disable the inference of - data types for CSV data. If true, all columns - are registered as strings. - quote (str): - Optional. The character used to quote column values. Accepts - ``"`` (double quotation mark) or ``'`` (single quotation - mark). If unspecified, defaults to ``"`` (double quotation - mark). - """ - - header_rows: int = proto.Field( - proto.INT32, - number=1, - ) - delimiter: str = proto.Field( - proto.STRING, - number=2, - ) - encoding: str = proto.Field( - proto.STRING, - number=3, - ) - type_inference_disabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - quote: str = proto.Field( - proto.STRING, - number=5, - ) - - class JsonOptions(proto.Message): - r"""Describes JSON data format. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - type_inference_disabled (bool): - Optional. Whether to disable the inference of - data types for JSON data. If true, all columns - are registered as their primitive types - (strings, number, or boolean). - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - type_inference_disabled: bool = proto.Field( - proto.BOOL, - number=2, - ) - - include_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - exclude_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - csv_options: 'DataDiscoverySpec.StorageConfig.CsvOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='DataDiscoverySpec.StorageConfig.CsvOptions', - ) - json_options: 'DataDiscoverySpec.StorageConfig.JsonOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='DataDiscoverySpec.StorageConfig.JsonOptions', - ) - - bigquery_publishing_config: BigQueryPublishingConfig = proto.Field( - proto.MESSAGE, - number=1, - message=BigQueryPublishingConfig, - ) - storage_config: StorageConfig = proto.Field( - proto.MESSAGE, - number=100, - oneof='resource_config', - message=StorageConfig, - ) - - -class DataDiscoveryResult(proto.Message): - r"""The output of a data discovery scan. - - Attributes: - bigquery_publishing (google.cloud.dataplex_v1.types.DataDiscoveryResult.BigQueryPublishing): - Output only. Configuration for metadata - publishing. - scan_statistics (google.cloud.dataplex_v1.types.DataDiscoveryResult.ScanStatistics): - Output only. Describes result statistics of a - data scan discovery job. - """ - - class BigQueryPublishing(proto.Message): - r"""Describes BigQuery publishing configurations. - - Attributes: - dataset (str): - Output only. 
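# --- Illustrative sketch (editor's note, not part of the generated file) ---
# Example of the discovery spec defined above: publish discovered tables as
# BigLake tables through a BigQuery connection, scanning only matching CSV
# objects. The connection, project, and glob patterns are placeholders.
from google.cloud import dataplex_v1

spec = dataplex_v1.DataDiscoverySpec(
    bigquery_publishing_config=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig(
        table_type=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig.TableType.BIGLAKE,
        connection="projects/my-project/locations/us-central1/connections/my-connection",
        project="projects/my-project",
    ),
    storage_config=dataplex_v1.DataDiscoverySpec.StorageConfig(
        include_patterns=["sales/**.csv"],
        exclude_patterns=["**/_staging/**"],
        csv_options=dataplex_v1.DataDiscoverySpec.StorageConfig.CsvOptions(
            header_rows=1,
            delimiter=",",
        ),
    ),
)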
The BigQuery dataset the - discovered tables are published to. - location (str): - Output only. The location of the BigQuery - publishing dataset. - """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - - class ScanStatistics(proto.Message): - r"""Describes result statistics of a data scan discovery job. - - Attributes: - scanned_file_count (int): - The number of files scanned. - data_processed_bytes (int): - The data processed in bytes. - files_excluded (int): - The number of files excluded. - tables_created (int): - The number of tables created. - tables_deleted (int): - The number of tables deleted. - tables_updated (int): - The number of tables updated. - filesets_created (int): - The number of filesets created. - filesets_deleted (int): - The number of filesets deleted. - filesets_updated (int): - The number of filesets updated. - """ - - scanned_file_count: int = proto.Field( - proto.INT32, - number=1, - ) - data_processed_bytes: int = proto.Field( - proto.INT64, - number=2, - ) - files_excluded: int = proto.Field( - proto.INT32, - number=3, - ) - tables_created: int = proto.Field( - proto.INT32, - number=4, - ) - tables_deleted: int = proto.Field( - proto.INT32, - number=5, - ) - tables_updated: int = proto.Field( - proto.INT32, - number=6, - ) - filesets_created: int = proto.Field( - proto.INT32, - number=7, - ) - filesets_deleted: int = proto.Field( - proto.INT32, - number=8, - ) - filesets_updated: int = proto.Field( - proto.INT32, - number=9, - ) - - bigquery_publishing: BigQueryPublishing = proto.Field( - proto.MESSAGE, - number=1, - message=BigQueryPublishing, - ) - scan_statistics: ScanStatistics = proto.Field( - proto.MESSAGE, - number=2, - message=ScanStatistics, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py deleted file mode 100644 index cd7c5ce8369f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_profile.py +++ /dev/null @@ -1,546 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import processing - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataProfileSpec', - 'DataProfileResult', - }, -) - - -class DataProfileSpec(proto.Message): - r"""DataProfileScan related setting. - - Attributes: - sampling_percent (float): - Optional. The percentage of the records to be selected from - the dataset for DataScan. - - - Value can range between 0.0 and 100.0 with up to 3 - significant decimal digits. - - Sampling is not applied if ``sampling_percent`` is not - specified, 0 or - - 100. 
- row_filter (str): - Optional. A filter applied to all rows in a - single DataScan job. The filter needs to be a - valid SQL expression for a WHERE clause in - BigQuery standard SQL syntax. - Example: col1 >= 0 AND col2 < 10 - post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): - Optional. Actions to take upon job - completion.. - include_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): - Optional. The fields to include in data profile. - - If not specified, all fields at the time of profile scan job - execution are included, except for ones listed in - ``exclude_fields``. - exclude_fields (google.cloud.dataplex_v1.types.DataProfileSpec.SelectedFields): - Optional. The fields to exclude from data profile. - - If specified, the fields will be excluded from data profile, - regardless of ``include_fields`` value. - """ - - class PostScanActions(proto.Message): - r"""The configuration of post scan actions of DataProfileScan - job. - - Attributes: - bigquery_export (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions.BigQueryExport): - Optional. If set, results will be exported to - the provided BigQuery table. - """ - - class BigQueryExport(proto.Message): - r"""The configuration of BigQuery export post scan action. - - Attributes: - results_table (str): - Optional. The BigQuery table to export DataProfileScan - results to. Format: - //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - """ - - results_table: str = proto.Field( - proto.STRING, - number=1, - ) - - bigquery_export: 'DataProfileSpec.PostScanActions.BigQueryExport' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileSpec.PostScanActions.BigQueryExport', - ) - - class SelectedFields(proto.Message): - r"""The specification for fields to include or exclude in data - profile scan. - - Attributes: - field_names (MutableSequence[str]): - Optional. Expected input is a list of fully - qualified names of fields as in the schema. - - Only top-level field names for nested fields are - supported. For instance, if 'x' is of nested - field type, listing 'x' is supported but 'x.y.z' - is not supported. Here 'y' and 'y.z' are nested - fields of 'x'. - """ - - field_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - sampling_percent: float = proto.Field( - proto.FLOAT, - number=2, - ) - row_filter: str = proto.Field( - proto.STRING, - number=3, - ) - post_scan_actions: PostScanActions = proto.Field( - proto.MESSAGE, - number=4, - message=PostScanActions, - ) - include_fields: SelectedFields = proto.Field( - proto.MESSAGE, - number=5, - message=SelectedFields, - ) - exclude_fields: SelectedFields = proto.Field( - proto.MESSAGE, - number=6, - message=SelectedFields, - ) - - -class DataProfileResult(proto.Message): - r"""DataProfileResult defines the output of DataProfileScan. Each - field of the table will have field type specific profile result. - - Attributes: - row_count (int): - Output only. The count of rows scanned. - profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): - Output only. The profile information per - field. - scanned_data (google.cloud.dataplex_v1.types.ScannedData): - Output only. The data scanned for this - result. - post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): - Output only. The result of post scan actions. 
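# --- Illustrative sketch (editor's note, not part of the generated file) ---
# Example of the profile spec defined above: sample a quarter of the rows,
# restrict the scan with a row filter, and export results to BigQuery.
# The table path and column names are placeholders.
from google.cloud import dataplex_v1

spec = dataplex_v1.DataProfileSpec(
    sampling_percent=25.0,  # unset, 0, or 100 means no sampling
    row_filter="col1 >= 0 AND col2 < 10",
    include_fields=dataplex_v1.DataProfileSpec.SelectedFields(
        field_names=["col1", "col2"],  # top-level field names only
    ),
    post_scan_actions=dataplex_v1.DataProfileSpec.PostScanActions(
        bigquery_export=dataplex_v1.DataProfileSpec.PostScanActions.BigQueryExport(
            results_table="//bigquery.googleapis.com/projects/my-project/datasets/my_dataset/tables/profile_results",
        ),
    ),
)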
- """ - - class Profile(proto.Message): - r"""Contains name, type, mode and field type specific profile - information. - - Attributes: - fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): - Output only. List of fields with structural - and profile information for each field. - """ - - class Field(proto.Message): - r"""A field within a table. - - Attributes: - name (str): - Output only. The name of the field. - type_ (str): - Output only. The data type retrieved from the schema of the - data source. For instance, for a BigQuery native table, it - is the `BigQuery Table - Schema `__. - For a Dataplex Universal Catalog Entity, it is the `Entity - Schema `__. - mode (str): - Output only. The mode of the field. Possible values include: - - - REQUIRED, if it is a required field. - - NULLABLE, if it is an optional field. - - REPEATED, if it is a repeated field. - profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): - Output only. Profile information for the - corresponding field. - """ - - class ProfileInfo(proto.Message): - r"""The profile information for each field type. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - null_ratio (float): - Output only. Ratio of rows with null value - against total scanned rows. - distinct_ratio (float): - Output only. Ratio of rows with distinct - values against total scanned rows. Not available - for complex non-groupable field type, including - RECORD, ARRAY, GEOGRAPHY, and JSON, as well as - fields with REPEATABLE mode. - top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): - Output only. The list of top N non-null - values, frequency and ratio with which they - occur in the scanned data. N is 10 or equal to - the number of distinct values in the field, - whichever is smaller. Not available for complex - non-groupable field type, including RECORD, - ARRAY, GEOGRAPHY, and JSON, as well as fields - with REPEATABLE mode. - string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): - String type field information. - - This field is a member of `oneof`_ ``field_info``. - integer_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo): - Integer type field information. - - This field is a member of `oneof`_ ``field_info``. - double_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo): - Double type field information. - - This field is a member of `oneof`_ ``field_info``. - """ - - class StringFieldInfo(proto.Message): - r"""The profile information for a string type field. - - Attributes: - min_length (int): - Output only. Minimum length of non-null - values in the scanned data. - max_length (int): - Output only. Maximum length of non-null - values in the scanned data. - average_length (float): - Output only. Average length of non-null - values in the scanned data. 
- """ - - min_length: int = proto.Field( - proto.INT64, - number=1, - ) - max_length: int = proto.Field( - proto.INT64, - number=2, - ) - average_length: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - class IntegerFieldInfo(proto.Message): - r"""The profile information for an integer type field. - - Attributes: - average (float): - Output only. Average of non-null values in - the scanned data. NaN, if the field has a NaN. - standard_deviation (float): - Output only. Standard deviation of non-null - values in the scanned data. NaN, if the field - has a NaN. - min_ (int): - Output only. Minimum of non-null values in - the scanned data. NaN, if the field has a NaN. - quartiles (MutableSequence[int]): - Output only. A quartile divides the number of - data points into four parts, or quarters, of - more-or-less equal size. Three main quartiles - used are: The first quartile (Q1) splits off the - lowest 25% of data from the highest 75%. It is - also known as the lower or 25th empirical - quartile, as 25% of the data is below this - point. The second quartile (Q2) is the median of - a data set. So, 50% of the data lies below this - point. The third quartile (Q3) splits off the - highest 25% of data from the lowest 75%. It is - known as the upper or 75th empirical quartile, - as 75% of the data lies below this point. Here, - the quartiles is provided as an ordered list of - approximate quartile values for the scanned - data, occurring in order Q1, median, Q3. - max_ (int): - Output only. Maximum of non-null values in - the scanned data. NaN, if the field has a NaN. - """ - - average: float = proto.Field( - proto.DOUBLE, - number=1, - ) - standard_deviation: float = proto.Field( - proto.DOUBLE, - number=3, - ) - min_: int = proto.Field( - proto.INT64, - number=4, - ) - quartiles: MutableSequence[int] = proto.RepeatedField( - proto.INT64, - number=6, - ) - max_: int = proto.Field( - proto.INT64, - number=5, - ) - - class DoubleFieldInfo(proto.Message): - r"""The profile information for a double type field. - - Attributes: - average (float): - Output only. Average of non-null values in - the scanned data. NaN, if the field has a NaN. - standard_deviation (float): - Output only. Standard deviation of non-null - values in the scanned data. NaN, if the field - has a NaN. - min_ (float): - Output only. Minimum of non-null values in - the scanned data. NaN, if the field has a NaN. - quartiles (MutableSequence[float]): - Output only. A quartile divides the number of - data points into four parts, or quarters, of - more-or-less equal size. Three main quartiles - used are: The first quartile (Q1) splits off the - lowest 25% of data from the highest 75%. It is - also known as the lower or 25th empirical - quartile, as 25% of the data is below this - point. The second quartile (Q2) is the median of - a data set. So, 50% of the data lies below this - point. The third quartile (Q3) splits off the - highest 25% of data from the lowest 75%. It is - known as the upper or 75th empirical quartile, - as 75% of the data lies below this point. Here, - the quartiles is provided as an ordered list of - quartile values for the scanned data, occurring - in order Q1, median, Q3. - max_ (float): - Output only. Maximum of non-null values in - the scanned data. NaN, if the field has a NaN. 
- """ - - average: float = proto.Field( - proto.DOUBLE, - number=1, - ) - standard_deviation: float = proto.Field( - proto.DOUBLE, - number=3, - ) - min_: float = proto.Field( - proto.DOUBLE, - number=4, - ) - quartiles: MutableSequence[float] = proto.RepeatedField( - proto.DOUBLE, - number=6, - ) - max_: float = proto.Field( - proto.DOUBLE, - number=5, - ) - - class TopNValue(proto.Message): - r"""Top N non-null values in the scanned data. - - Attributes: - value (str): - Output only. String value of a top N non-null - value. - count (int): - Output only. Count of the corresponding value - in the scanned data. - ratio (float): - Output only. Ratio of the corresponding value - in the field against the total number of rows in - the scanned data. - """ - - value: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - ratio: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - null_ratio: float = proto.Field( - proto.DOUBLE, - number=2, - ) - distinct_ratio: float = proto.Field( - proto.DOUBLE, - number=3, - ) - top_n_values: MutableSequence['DataProfileResult.Profile.Field.ProfileInfo.TopNValue'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='DataProfileResult.Profile.Field.ProfileInfo.TopNValue', - ) - string_profile: 'DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo' = proto.Field( - proto.MESSAGE, - number=101, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo', - ) - integer_profile: 'DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo' = proto.Field( - proto.MESSAGE, - number=102, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.IntegerFieldInfo', - ) - double_profile: 'DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo' = proto.Field( - proto.MESSAGE, - number=103, - oneof='field_info', - message='DataProfileResult.Profile.Field.ProfileInfo.DoubleFieldInfo', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - mode: str = proto.Field( - proto.STRING, - number=3, - ) - profile: 'DataProfileResult.Profile.Field.ProfileInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='DataProfileResult.Profile.Field.ProfileInfo', - ) - - fields: MutableSequence['DataProfileResult.Profile.Field'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataProfileResult.Profile.Field', - ) - - class PostScanActionsResult(proto.Message): - r"""The result of post scan actions of DataProfileScan job. - - Attributes: - bigquery_export_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult): - Output only. The result of BigQuery export - post scan action. - """ - - class BigQueryExportResult(proto.Message): - r"""The result of BigQuery export post scan action. - - Attributes: - state (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult.BigQueryExportResult.State): - Output only. Execution state for the BigQuery - exporting. - message (str): - Output only. Additional information about the - BigQuery exporting. - """ - class State(proto.Enum): - r"""Execution state for the exporting. - - Values: - STATE_UNSPECIFIED (0): - The exporting state is unspecified. - SUCCEEDED (1): - The exporting completed successfully. - FAILED (2): - The exporting is no longer running due to an - error. 
- SKIPPED (3): - The exporting is skipped due to no valid scan - result to export (usually caused by scan - failed). - """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataProfileResult.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataProfileResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataProfileResult.PostScanActionsResult.BigQueryExportResult', - ) - - row_count: int = proto.Field( - proto.INT64, - number=3, - ) - profile: Profile = proto.Field( - proto.MESSAGE, - number=4, - message=Profile, - ) - scanned_data: processing.ScannedData = proto.Field( - proto.MESSAGE, - number=5, - message=processing.ScannedData, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=6, - message=PostScanActionsResult, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py deleted file mode 100644 index 20217defc1a9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_quality.py +++ /dev/null @@ -1,962 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import datascans_common -from google.cloud.dataplex_v1.types import processing - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataQualitySpec', - 'DataQualityResult', - 'DataQualityRuleResult', - 'DataQualityDimensionResult', - 'DataQualityDimension', - 'DataQualityRule', - 'DataQualityColumnResult', - }, -) - - -class DataQualitySpec(proto.Message): - r"""DataQualityScan related setting. - - Attributes: - rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): - Required. The list of rules to evaluate - against a data source. At least one rule is - required. - sampling_percent (float): - Optional. The percentage of the records to be selected from - the dataset for DataScan. - - - Value can range between 0.0 and 100.0 with up to 3 - significant decimal digits. - - Sampling is not applied if ``sampling_percent`` is not - specified, 0 or - - 100. - row_filter (str): - Optional. A filter applied to all rows in a single DataScan - job. The filter needs to be a valid SQL expression for a - `WHERE clause in GoogleSQL - syntax `__. - - Example: col1 >= 0 AND col2 < 10 - post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): - Optional. Actions to take upon job - completion. 
- catalog_publishing_enabled (bool): - Optional. If set, the latest DataScan job - result will be published as Dataplex Universal - Catalog metadata. - """ - - class PostScanActions(proto.Message): - r"""The configuration of post scan actions of DataQualityScan. - - Attributes: - bigquery_export (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.BigQueryExport): - Optional. If set, results will be exported to - the provided BigQuery table. - notification_report (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.NotificationReport): - Optional. If set, results will be sent to the - provided notification receipts upon triggers. - """ - - class BigQueryExport(proto.Message): - r"""The configuration of BigQuery export post scan action. - - Attributes: - results_table (str): - Optional. The BigQuery table to export DataQualityScan - results to. Format: - //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - """ - - results_table: str = proto.Field( - proto.STRING, - number=1, - ) - - class Recipients(proto.Message): - r"""The individuals or groups who are designated to receive - notifications upon triggers. - - Attributes: - emails (MutableSequence[str]): - Optional. The email recipients who will - receive the DataQualityScan results report. - """ - - emails: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - class ScoreThresholdTrigger(proto.Message): - r"""This trigger is triggered when the DQ score in the job result - is less than a specified input score. - - Attributes: - score_threshold (float): - Optional. The score range is in [0,100]. - """ - - score_threshold: float = proto.Field( - proto.FLOAT, - number=2, - ) - - class JobFailureTrigger(proto.Message): - r"""This trigger is triggered when the scan job itself fails, - regardless of the result. - - """ - - class JobEndTrigger(proto.Message): - r"""This trigger is triggered whenever a scan job run ends, - regardless of the result. - - """ - - class NotificationReport(proto.Message): - r"""The configuration of notification report post scan action. - - Attributes: - recipients (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.Recipients): - Required. The recipients who will receive the - notification report. - score_threshold_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.ScoreThresholdTrigger): - Optional. If set, report will be sent when - score threshold is met. - job_failure_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobFailureTrigger): - Optional. If set, report will be sent when a - scan job fails. - job_end_trigger (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions.JobEndTrigger): - Optional. If set, report will be sent when a - scan job ends. 
- """ - - recipients: 'DataQualitySpec.PostScanActions.Recipients' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualitySpec.PostScanActions.Recipients', - ) - score_threshold_trigger: 'DataQualitySpec.PostScanActions.ScoreThresholdTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='DataQualitySpec.PostScanActions.ScoreThresholdTrigger', - ) - job_failure_trigger: 'DataQualitySpec.PostScanActions.JobFailureTrigger' = proto.Field( - proto.MESSAGE, - number=4, - message='DataQualitySpec.PostScanActions.JobFailureTrigger', - ) - job_end_trigger: 'DataQualitySpec.PostScanActions.JobEndTrigger' = proto.Field( - proto.MESSAGE, - number=5, - message='DataQualitySpec.PostScanActions.JobEndTrigger', - ) - - bigquery_export: 'DataQualitySpec.PostScanActions.BigQueryExport' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualitySpec.PostScanActions.BigQueryExport', - ) - notification_report: 'DataQualitySpec.PostScanActions.NotificationReport' = proto.Field( - proto.MESSAGE, - number=2, - message='DataQualitySpec.PostScanActions.NotificationReport', - ) - - rules: MutableSequence['DataQualityRule'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataQualityRule', - ) - sampling_percent: float = proto.Field( - proto.FLOAT, - number=4, - ) - row_filter: str = proto.Field( - proto.STRING, - number=5, - ) - post_scan_actions: PostScanActions = proto.Field( - proto.MESSAGE, - number=6, - message=PostScanActions, - ) - catalog_publishing_enabled: bool = proto.Field( - proto.BOOL, - number=8, - ) - - -class DataQualityResult(proto.Message): - r"""The output of a DataQualityScan. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - passed (bool): - Output only. Overall data quality result -- ``true`` if all - rules passed. - score (float): - Output only. The overall data quality score. - - The score ranges between [0, 100] (up to two decimal - points). - - This field is a member of `oneof`_ ``_score``. - dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): - Output only. A list of results at the dimension level. - - A dimension will have a corresponding - ``DataQualityDimensionResult`` if and only if there is at - least one rule with the 'dimension' field set to it. - columns (MutableSequence[google.cloud.dataplex_v1.types.DataQualityColumnResult]): - Output only. A list of results at the column level. - - A column will have a corresponding - ``DataQualityColumnResult`` if and only if there is at least - one rule with the 'column' field set to it. - rules (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRuleResult]): - Output only. A list of all the rules in a - job, and their results. - row_count (int): - Output only. The count of rows processed. - scanned_data (google.cloud.dataplex_v1.types.ScannedData): - Output only. The data scanned for this - result. - post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): - Output only. The result of post scan actions. - catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): - Output only. The status of publishing the - data scan as Dataplex Universal Catalog - metadata. - """ - - class PostScanActionsResult(proto.Message): - r"""The result of post scan actions of DataQualityScan job. 
- - Attributes: - bigquery_export_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult): - Output only. The result of BigQuery export - post scan action. - """ - - class BigQueryExportResult(proto.Message): - r"""The result of BigQuery export post scan action. - - Attributes: - state (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult.BigQueryExportResult.State): - Output only. Execution state for the BigQuery - exporting. - message (str): - Output only. Additional information about the - BigQuery exporting. - """ - class State(proto.Enum): - r"""Execution state for the exporting. - - Values: - STATE_UNSPECIFIED (0): - The exporting state is unspecified. - SUCCEEDED (1): - The exporting completed successfully. - FAILED (2): - The exporting is no longer running due to an - error. - SKIPPED (3): - The exporting is skipped due to no valid scan - result to export (usually caused by a failed - scan). - """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataQualityResult.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataQualityResult.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityResult.PostScanActionsResult.BigQueryExportResult', - ) - - passed: bool = proto.Field( - proto.BOOL, - number=5, - ) - score: float = proto.Field( - proto.FLOAT, - number=9, - optional=True, - ) - dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='DataQualityDimensionResult', - ) - columns: MutableSequence['DataQualityColumnResult'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='DataQualityColumnResult', - ) - rules: MutableSequence['DataQualityRuleResult'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='DataQualityRuleResult', - ) - row_count: int = proto.Field( - proto.INT64, - number=4, - ) - scanned_data: processing.ScannedData = proto.Field( - proto.MESSAGE, - number=7, - message=processing.ScannedData, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=8, - message=PostScanActionsResult, - ) - catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = proto.Field( - proto.MESSAGE, - number=11, - message=datascans_common.DataScanCatalogPublishingStatus, - ) - - -class DataQualityRuleResult(proto.Message): - r"""DataQualityRuleResult provides a more detailed, per-rule view - of the results. - - Attributes: - rule (google.cloud.dataplex_v1.types.DataQualityRule): - Output only. The rule specified in the - DataQualitySpec, as is. - passed (bool): - Output only. Whether the rule passed or - failed. - evaluated_count (int): - Output only. The number of rows a rule was evaluated - against. - - This field is only valid for row-level type rules. - - Evaluated count can be configured to either - - - include all rows (default) - with ``null`` rows - automatically failing rule evaluation, or - - exclude ``null`` rows from the ``evaluated_count``, by - setting ``ignore_nulls = true``. - - This field is not set for rule SqlAssertion. - passed_count (int): - Output only. The number of rows which passed - a rule evaluation. - This field is only valid for row-level type - rules.
- - This field is not set for rule SqlAssertion. - null_count (int): - Output only. The number of rows with null - values in the specified column. - pass_ratio (float): - Output only. The ratio of **passed_count / - evaluated_count**. - - This field is only valid for row-level type rules. - failing_rows_query (str): - Output only. The query to find rows that did - not pass this rule. - This field is only valid for row-level type - rules. - assertion_row_count (int): - Output only. The number of rows returned by - the SQL statement in a SQL assertion rule. - - This field is only valid for SQL assertion - rules. - """ - - rule: 'DataQualityRule' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityRule', - ) - passed: bool = proto.Field( - proto.BOOL, - number=7, - ) - evaluated_count: int = proto.Field( - proto.INT64, - number=9, - ) - passed_count: int = proto.Field( - proto.INT64, - number=8, - ) - null_count: int = proto.Field( - proto.INT64, - number=5, - ) - pass_ratio: float = proto.Field( - proto.DOUBLE, - number=6, - ) - failing_rows_query: str = proto.Field( - proto.STRING, - number=10, - ) - assertion_row_count: int = proto.Field( - proto.INT64, - number=11, - ) - - -class DataQualityDimensionResult(proto.Message): - r"""DataQualityDimensionResult provides a more detailed, - per-dimension view of the results. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - dimension (google.cloud.dataplex_v1.types.DataQualityDimension): - Output only. The dimension config specified - in the DataQualitySpec, as is. - passed (bool): - Output only. Whether the dimension passed or - failed. - score (float): - Output only. The dimension-level data quality score for this - data scan job if and only if the 'dimension' field is set. - - The score ranges between [0, 100] (up to two decimal - points). - - This field is a member of `oneof`_ ``_score``. - """ - - dimension: 'DataQualityDimension' = proto.Field( - proto.MESSAGE, - number=1, - message='DataQualityDimension', - ) - passed: bool = proto.Field( - proto.BOOL, - number=3, - ) - score: float = proto.Field( - proto.FLOAT, - number=4, - optional=True, - ) - - -class DataQualityDimension(proto.Message): - r"""A dimension captures data quality intent about a defined - subset of the rules specified. - - Attributes: - name (str): - Output only. The dimension name a rule - belongs to. Custom dimension name is supported - with all uppercase letters and maximum length of - 30 characters. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DataQualityRule(proto.Message): - r"""A rule captures data quality intent about a data source. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RangeExpectation): - Row-level rule which evaluates whether each - column value lies between a specified range. - - This field is a member of `oneof`_ ``rule_type``. - non_null_expectation (google.cloud.dataplex_v1.types.DataQualityRule.NonNullExpectation): - Row-level rule which evaluates whether each - column value is null. - - This field is a member of `oneof`_ ``rule_type``. 
- set_expectation (google.cloud.dataplex_v1.types.DataQualityRule.SetExpectation): - Row-level rule which evaluates whether each - column value is contained by a specified set. - - This field is a member of `oneof`_ ``rule_type``. - regex_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RegexExpectation): - Row-level rule which evaluates whether each - column value matches a specified regex. - - This field is a member of `oneof`_ ``rule_type``. - uniqueness_expectation (google.cloud.dataplex_v1.types.DataQualityRule.UniquenessExpectation): - Row-level rule which evaluates whether each - column value is unique. - - This field is a member of `oneof`_ ``rule_type``. - statistic_range_expectation (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation): - Aggregate rule which evaluates whether the - column aggregate statistic lies between a - specified range. - - This field is a member of `oneof`_ ``rule_type``. - row_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.RowConditionExpectation): - Row-level rule which evaluates whether each - row in a table passes the specified condition. - - This field is a member of `oneof`_ ``rule_type``. - table_condition_expectation (google.cloud.dataplex_v1.types.DataQualityRule.TableConditionExpectation): - Aggregate rule which evaluates whether the - provided expression is true for a table. - - This field is a member of `oneof`_ ``rule_type``. - sql_assertion (google.cloud.dataplex_v1.types.DataQualityRule.SqlAssertion): - Aggregate rule which evaluates the number of - rows returned for the provided statement. If any - rows are returned, this rule fails. - - This field is a member of `oneof`_ ``rule_type``. - column (str): - Optional. The unnested column which this rule - is evaluated against. - ignore_null (bool): - Optional. Rows with ``null`` values will automatically fail - a rule, unless ``ignore_null`` is ``true``. In that case, - such ``null`` rows are trivially considered passing. - - This field is only valid for the following type of rules: - - - RangeExpectation - - RegexExpectation - - SetExpectation - - UniquenessExpectation - dimension (str): - Required. The dimension a rule belongs to. - Results are also aggregated at the dimension - level. Custom dimension name is supported with - all uppercase letters and maximum length of 30 - characters. - threshold (float): - Optional. The minimum ratio of **passing_rows / total_rows** - required to pass this rule, with a range of [0.0, 1.0]. - - 0 indicates default value (i.e. 1.0). - - This field is only valid for row-level type rules. - name (str): - Optional. A mutable name for the rule. - - - The name must contain only letters (a-z, A-Z), numbers - (0-9), or hyphens (-). - - The maximum length is 63 characters. - - Must start with a letter. - - Must end with a number or a letter. - description (str): - Optional. Description of the rule. - - - The maximum length is 1,024 characters. - suspended (bool): - Optional. Whether the Rule is active or - suspended. Default is false. - """ - - class RangeExpectation(proto.Message): - r"""Evaluates whether each column value lies between a specified - range. - - Attributes: - min_value (str): - Optional. The minimum column value allowed for a row to pass - this validation. At least one of ``min_value`` and - ``max_value`` need to be provided. - max_value (str): - Optional. The maximum column value allowed for a row to pass - this validation. At least one of ``min_value`` and - ``max_value`` need to be provided. 
- strict_min_enabled (bool): - Optional. Whether each value needs to be strictly greater - than ('>') the minimum, or if equality is allowed. - - Only relevant if a ``min_value`` has been defined. Default = - false. - strict_max_enabled (bool): - Optional. Whether each value needs to be strictly lesser - than ('<') the maximum, or if equality is allowed. - - Only relevant if a ``max_value`` has been defined. Default = - false. - """ - - min_value: str = proto.Field( - proto.STRING, - number=1, - ) - max_value: str = proto.Field( - proto.STRING, - number=2, - ) - strict_min_enabled: bool = proto.Field( - proto.BOOL, - number=3, - ) - strict_max_enabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class NonNullExpectation(proto.Message): - r"""Evaluates whether each column value is null. - """ - - class SetExpectation(proto.Message): - r"""Evaluates whether each column value is contained by a - specified set. - - Attributes: - values (MutableSequence[str]): - Optional. Expected values for the column - value. - """ - - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - class RegexExpectation(proto.Message): - r"""Evaluates whether each column value matches a specified - regex. - - Attributes: - regex (str): - Optional. A regular expression the column - value is expected to match. - """ - - regex: str = proto.Field( - proto.STRING, - number=1, - ) - - class UniquenessExpectation(proto.Message): - r"""Evaluates whether the column has duplicates. - """ - - class StatisticRangeExpectation(proto.Message): - r"""Evaluates whether the column aggregate statistic lies between - a specified range. - - Attributes: - statistic (google.cloud.dataplex_v1.types.DataQualityRule.StatisticRangeExpectation.ColumnStatistic): - Optional. The aggregate metric to evaluate. - min_value (str): - Optional. The minimum column statistic value allowed for a - row to pass this validation. - - At least one of ``min_value`` and ``max_value`` need to be - provided. - max_value (str): - Optional. The maximum column statistic value allowed for a - row to pass this validation. - - At least one of ``min_value`` and ``max_value`` need to be - provided. - strict_min_enabled (bool): - Optional. Whether column statistic needs to be strictly - greater than ('>') the minimum, or if equality is allowed. - - Only relevant if a ``min_value`` has been defined. Default = - false. - strict_max_enabled (bool): - Optional. Whether column statistic needs to be strictly - lesser than ('<') the maximum, or if equality is allowed. - - Only relevant if a ``max_value`` has been defined. Default = - false. - """ - class ColumnStatistic(proto.Enum): - r"""The list of aggregate metrics a rule can be evaluated - against. 
- - Values: - STATISTIC_UNDEFINED (0): - Unspecified statistic type - MEAN (1): - Evaluate the column mean - MIN (2): - Evaluate the column min - MAX (3): - Evaluate the column max - """ - STATISTIC_UNDEFINED = 0 - MEAN = 1 - MIN = 2 - MAX = 3 - - statistic: 'DataQualityRule.StatisticRangeExpectation.ColumnStatistic' = proto.Field( - proto.ENUM, - number=1, - enum='DataQualityRule.StatisticRangeExpectation.ColumnStatistic', - ) - min_value: str = proto.Field( - proto.STRING, - number=2, - ) - max_value: str = proto.Field( - proto.STRING, - number=3, - ) - strict_min_enabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - strict_max_enabled: bool = proto.Field( - proto.BOOL, - number=5, - ) - - class RowConditionExpectation(proto.Message): - r"""Evaluates whether each row passes the specified condition. - - The SQL expression needs to use `GoogleSQL - syntax `__ - and should produce a boolean value per row as the result. - - Example: col1 >= 0 AND col2 < 10 - - Attributes: - sql_expression (str): - Optional. The SQL expression. - """ - - sql_expression: str = proto.Field( - proto.STRING, - number=1, - ) - - class TableConditionExpectation(proto.Message): - r"""Evaluates whether the provided expression is true. - - The SQL expression needs to use `GoogleSQL - syntax `__ - and should produce a scalar boolean result. - - Example: MIN(col1) >= 0 - - Attributes: - sql_expression (str): - Optional. The SQL expression. - """ - - sql_expression: str = proto.Field( - proto.STRING, - number=1, - ) - - class SqlAssertion(proto.Message): - r"""A SQL statement that is evaluated to return rows that match an - invalid state. If any rows are returned, this rule fails. - - The SQL statement must use `GoogleSQL - syntax `__, - and must not contain any semicolons. - - You can use the data reference parameter ``${data()}`` to reference - the source table with all of its precondition filters applied. - Examples of precondition filters include row filters, incremental - data filters, and sampling. For more information, see `Data - reference - parameter `__. - - Example: ``SELECT * FROM ${data()} WHERE price < 0`` - - Attributes: - sql_statement (str): - Optional. The SQL statement.
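-
-            Example (editorial sketch; the rule name and dimension are
-            illustrative, and ``DataQualityRule`` is the enclosing message
-            defined in this module)::
-
-                rule = DataQualityRule(
-                    sql_assertion=DataQualityRule.SqlAssertion(
-                        sql_statement="SELECT * FROM ${data()} WHERE price < 0",
-                    ),
-                    dimension="VALIDITY",
-                    name="no-negative-prices",
-                )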
- """ - - sql_statement: str = proto.Field( - proto.STRING, - number=1, - ) - - range_expectation: RangeExpectation = proto.Field( - proto.MESSAGE, - number=1, - oneof='rule_type', - message=RangeExpectation, - ) - non_null_expectation: NonNullExpectation = proto.Field( - proto.MESSAGE, - number=2, - oneof='rule_type', - message=NonNullExpectation, - ) - set_expectation: SetExpectation = proto.Field( - proto.MESSAGE, - number=3, - oneof='rule_type', - message=SetExpectation, - ) - regex_expectation: RegexExpectation = proto.Field( - proto.MESSAGE, - number=4, - oneof='rule_type', - message=RegexExpectation, - ) - uniqueness_expectation: UniquenessExpectation = proto.Field( - proto.MESSAGE, - number=100, - oneof='rule_type', - message=UniquenessExpectation, - ) - statistic_range_expectation: StatisticRangeExpectation = proto.Field( - proto.MESSAGE, - number=101, - oneof='rule_type', - message=StatisticRangeExpectation, - ) - row_condition_expectation: RowConditionExpectation = proto.Field( - proto.MESSAGE, - number=200, - oneof='rule_type', - message=RowConditionExpectation, - ) - table_condition_expectation: TableConditionExpectation = proto.Field( - proto.MESSAGE, - number=201, - oneof='rule_type', - message=TableConditionExpectation, - ) - sql_assertion: SqlAssertion = proto.Field( - proto.MESSAGE, - number=202, - oneof='rule_type', - message=SqlAssertion, - ) - column: str = proto.Field( - proto.STRING, - number=500, - ) - ignore_null: bool = proto.Field( - proto.BOOL, - number=501, - ) - dimension: str = proto.Field( - proto.STRING, - number=502, - ) - threshold: float = proto.Field( - proto.DOUBLE, - number=503, - ) - name: str = proto.Field( - proto.STRING, - number=504, - ) - description: str = proto.Field( - proto.STRING, - number=505, - ) - suspended: bool = proto.Field( - proto.BOOL, - number=506, - ) - - -class DataQualityColumnResult(proto.Message): - r"""DataQualityColumnResult provides a more detailed, per-column - view of the results. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - column (str): - Output only. The column specified in the - DataQualityRule. - score (float): - Output only. The column-level data quality score for this - data scan job if and only if the 'column' field is set. - - The score ranges between between [0, 100] (up to two decimal - points). - - This field is a member of `oneof`_ ``_score``. - passed (bool): - Output only. Whether the column passed or - failed. - dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): - Output only. The dimension-level results for - this column. 
- """ - - column: str = proto.Field( - proto.STRING, - number=1, - ) - score: float = proto.Field( - proto.FLOAT, - number=2, - optional=True, - ) - passed: bool = proto.Field( - proto.BOOL, - number=3, - ) - dimensions: MutableSequence['DataQualityDimensionResult'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='DataQualityDimensionResult', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py deleted file mode 100644 index bc788d18cd85..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/data_taxonomy.py +++ /dev/null @@ -1,972 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import security -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataTaxonomy', - 'DataAttribute', - 'DataAttributeBinding', - 'CreateDataTaxonomyRequest', - 'UpdateDataTaxonomyRequest', - 'GetDataTaxonomyRequest', - 'ListDataTaxonomiesRequest', - 'ListDataTaxonomiesResponse', - 'DeleteDataTaxonomyRequest', - 'CreateDataAttributeRequest', - 'UpdateDataAttributeRequest', - 'GetDataAttributeRequest', - 'ListDataAttributesRequest', - 'ListDataAttributesResponse', - 'DeleteDataAttributeRequest', - 'CreateDataAttributeBindingRequest', - 'UpdateDataAttributeBindingRequest', - 'GetDataAttributeBindingRequest', - 'ListDataAttributeBindingsRequest', - 'ListDataAttributeBindingsResponse', - 'DeleteDataAttributeBindingRequest', - }, -) - - -class DataTaxonomy(proto.Message): - r"""DataTaxonomy represents a set of hierarchical DataAttributes - resources, grouped with a common theme Eg: - 'SensitiveDataTaxonomy' can have attributes to manage PII data. - It is defined at project level. - - Attributes: - name (str): - Output only. The relative resource name of the DataTaxonomy, - of the form: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}. - uid (str): - Output only. System generated globally unique - ID for the dataTaxonomy. This ID will be - different if the DataTaxonomy is deleted and - re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataTaxonomy - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataTaxonomy - was last updated. - description (str): - Optional. Description of the DataTaxonomy. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - DataTaxonomy. 
- attribute_count (int): - Output only. The number of attributes in the - DataTaxonomy. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - class_count (int): - Output only. The number of classes in the - DataTaxonomy. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - attribute_count: int = proto.Field( - proto.INT32, - number=9, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - class_count: int = proto.Field( - proto.INT32, - number=11, - ) - - -class DataAttribute(proto.Message): - r"""Denotes one dataAttribute in a dataTaxonomy, for example, PII. - DataAttribute resources can be defined in a hierarchy. A single - dataAttribute resource can contain specs of multiple types - - :: - - PII - - ResourceAccessSpec : - - readers :foo@bar.com - - DataAccessSpec : - - readers :bar@foo.com - - Attributes: - name (str): - Output only. The relative resource name of the - dataAttribute, of the form: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}. - uid (str): - Output only. System generated globally unique - ID for the DataAttribute. This ID will be - different if the DataAttribute is deleted and - re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataAttribute - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataAttribute - was last updated. - description (str): - Optional. Description of the DataAttribute. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - DataAttribute. - parent_id (str): - Optional. The ID of the parent DataAttribute resource, - should belong to the same data taxonomy. Circular dependency - in parent chain is not valid. Maximum depth of the hierarchy - allowed is 4. [a -> b -> c -> d -> e, depth = 4] - attribute_count (int): - Output only. The number of child attributes - present for this attribute. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - resource_access_spec (google.cloud.dataplex_v1.types.ResourceAccessSpec): - Optional. Specified when applied to a - resource (eg: Cloud Storage bucket, BigQuery - dataset, BigQuery table). - data_access_spec (google.cloud.dataplex_v1.types.DataAccessSpec): - Optional. Specified when applied to data - stored on the resource (eg: rows, columns in - BigQuery Tables). 
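-
-        Example (editorial sketch; assumes the types are imported from
-        ``google.cloud.dataplex_v1`` and the principal shown is a
-        placeholder)::
-
-            attribute = DataAttribute(
-                description="Personally identifiable information",
-                resource_access_spec=ResourceAccessSpec(
-                    readers=["group:foo@bar.com"],
-                ),
-            )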
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - parent_id: str = proto.Field( - proto.STRING, - number=8, - ) - attribute_count: int = proto.Field( - proto.INT32, - number=9, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - resource_access_spec: security.ResourceAccessSpec = proto.Field( - proto.MESSAGE, - number=100, - message=security.ResourceAccessSpec, - ) - data_access_spec: security.DataAccessSpec = proto.Field( - proto.MESSAGE, - number=101, - message=security.DataAccessSpec, - ) - - -class DataAttributeBinding(proto.Message): - r"""DataAttributeBinding represents binding of attributes to - resources. Eg: Bind 'CustomerInfo' entity with 'PII' attribute. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the Data - Attribute Binding, of the form: - projects/{project_number}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id} - uid (str): - Output only. System generated globally unique - ID for the DataAttributeBinding. This ID will be - different if the DataAttributeBinding is deleted - and re-created with the same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the - DataAttributeBinding was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the - DataAttributeBinding was last updated. - description (str): - Optional. Description of the - DataAttributeBinding. - display_name (str): - Optional. User friendly display name. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the - DataAttributeBinding. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. Etags - must be used when calling the - DeleteDataAttributeBinding and the - UpdateDataAttributeBinding method. - resource (str): - Optional. Immutable. The resource name of the resource that - is associated to attributes. Presently, only entity resource - is supported in the form: - projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity_id} - Must belong in the same project and region as the attribute - binding, and there can only exist one active binding for a - resource. - - This field is a member of `oneof`_ ``resource_reference``. - attributes (MutableSequence[str]): - Optional. List of attributes to be associated with the - resource, provided in the form: - projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - paths (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding.Path]): - Optional. The list of paths for items within - the associated resource (eg. columns and - partitions within a table) along with attribute - bindings. 
- """ - - class Path(proto.Message): - r"""Represents a subresource of the given resource, and - associated bindings with it. Currently supported subresources - are column and partition schema fields within a table. - - Attributes: - name (str): - Required. The name identifier of the path. - Nested columns should be of the form: - 'address.city'. - attributes (MutableSequence[str]): - Optional. List of attributes to be associated with the path - of the resource, provided in the form: - projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - attributes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - resource: str = proto.Field( - proto.STRING, - number=100, - oneof='resource_reference', - ) - attributes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=110, - ) - paths: MutableSequence[Path] = proto.RepeatedField( - proto.MESSAGE, - number=120, - message=Path, - ) - - -class CreateDataTaxonomyRequest(proto.Message): - r"""Create DataTaxonomy request. - - Attributes: - parent (str): - - data_taxonomy_id (str): - Required. DataTaxonomy identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Project. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. DataTaxonomy resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_taxonomy_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_taxonomy: 'DataTaxonomy' = proto.Field( - proto.MESSAGE, - number=3, - message='DataTaxonomy', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataTaxonomyRequest(proto.Message): - r"""Update DataTaxonomy request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_taxonomy (google.cloud.dataplex_v1.types.DataTaxonomy): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_taxonomy: 'DataTaxonomy' = proto.Field( - proto.MESSAGE, - number=2, - message='DataTaxonomy', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataTaxonomyRequest(proto.Message): - r"""Get DataTaxonomy request. 
- - Attributes: - name (str): - - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataTaxonomiesRequest(proto.Message): - r"""List DataTaxonomies request. - - Attributes: - parent (str): - Required. The resource name of the DataTaxonomy location, of - the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of DataTaxonomies to - return. The service may return fewer than this - value. If unspecified, at most 10 DataTaxonomies - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataTaxonomies`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataTaxonomies`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataTaxonomiesResponse(proto.Message): - r"""List DataTaxonomies response. - - Attributes: - data_taxonomies (MutableSequence[google.cloud.dataplex_v1.types.DataTaxonomy]): - DataTaxonomies under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_taxonomies: MutableSequence['DataTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataTaxonomy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataTaxonomyRequest(proto.Message): - r"""Delete DataTaxonomy request. - - Attributes: - name (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteDataTaxonomy method returns an ABORTED - error. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDataAttributeRequest(proto.Message): - r"""Create DataAttribute request. - - Attributes: - parent (str): - Required. The resource name of the parent data taxonomy - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - data_attribute_id (str): - Required. DataAttribute identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the DataTaxonomy. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. DataAttribute resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false.
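-
-        Example (editorial sketch; resource names are illustrative and
-        ``client`` is assumed to be a ``DataTaxonomyServiceClient``)::
-
-            request = CreateDataAttributeRequest(
-                parent="projects/p/locations/us-central1/dataTaxonomies/t",
-                data_attribute_id="pii",
-                data_attribute=DataAttribute(description="PII data"),
-                validate_only=True,
-            )
-            client.create_data_attribute(request=request)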
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_attribute_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_attribute: 'DataAttribute' = proto.Field( - proto.MESSAGE, - number=3, - message='DataAttribute', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataAttributeRequest(proto.Message): - r"""Update DataAttribute request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_attribute (google.cloud.dataplex_v1.types.DataAttribute): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_attribute: 'DataAttribute' = proto.Field( - proto.MESSAGE, - number=2, - message='DataAttribute', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataAttributeRequest(proto.Message): - r"""Get DataAttribute request. - - Attributes: - name (str): - Required. The resource name of the dataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataAttributesRequest(proto.Message): - r"""List DataAttributes request. - - Attributes: - parent (str): - Required. The resource name of the DataTaxonomy: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id} - page_size (int): - Optional. Maximum number of DataAttributes to - return. The service may return fewer than this - value. If unspecified, at most 10 dataAttributes - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataAttributes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataAttributes`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataAttributesResponse(proto.Message): - r"""List DataAttributes response. - - Attributes: - data_attributes (MutableSequence[google.cloud.dataplex_v1.types.DataAttribute]): - DataAttributes under the given parent - DataTaxonomy. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_attributes: MutableSequence['DataAttribute'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataAttribute', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataAttributeRequest(proto.Message): - r"""Delete DataAttribute request. 
- - Attributes: - name (str): - Required. The resource name of the DataAttribute: - projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id} - etag (str): - Optional. If the client provided etag value - does not match the current etag value, the - DeleteDataAttribute method returns an ABORTED - error response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDataAttributeBindingRequest(proto.Message): - r"""Create DataAttributeBinding request. - - Attributes: - parent (str): - Required. The resource name of the parent location: - projects/{project_number}/locations/{location_id} - data_attribute_binding_id (str): - Required. DataAttributeBinding identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the Location. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. DataAttributeBinding resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_attribute_binding_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_attribute_binding: 'DataAttributeBinding' = proto.Field( - proto.MESSAGE, - number=3, - message='DataAttributeBinding', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataAttributeBindingRequest(proto.Message): - r"""Update DataAttributeBinding request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - data_attribute_binding (google.cloud.dataplex_v1.types.DataAttributeBinding): - Required. Only fields specified in ``update_mask`` are - updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_attribute_binding: 'DataAttributeBinding' = proto.Field( - proto.MESSAGE, - number=2, - message='DataAttributeBinding', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GetDataAttributeBindingRequest(proto.Message): - r"""Get DataAttributeBinding request. - - Attributes: - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataAttributeBindingsRequest(proto.Message): - r"""List DataAttributeBindings request. - - Attributes: - parent (str): - Required. The resource name of the Location: - projects/{project_number}/locations/{location_id} - page_size (int): - Optional. Maximum number of - DataAttributeBindings to return. The service may - return fewer than this value. If unspecified, at - most 10 DataAttributeBindings will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataAttributeBindings`` call. Provide this to retrieve - the subsequent page.
When paginating, all other parameters - provided to ``ListDataAttributeBindings`` must match the - call that provided the page token. - filter (str): - Optional. Filter request. - Filter using resource: - filter=resource:"resource-name" Filter using - attribute: filter=attributes:"attribute-name" - Filter using attribute in paths list: - - filter=paths.attributes:"attribute-name". - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataAttributeBindingsResponse(proto.Message): - r"""List DataAttributeBindings response. - - Attributes: - data_attribute_bindings (MutableSequence[google.cloud.dataplex_v1.types.DataAttributeBinding]): - DataAttributeBindings under the given parent - Location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_attribute_bindings: MutableSequence['DataAttributeBinding'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataAttributeBinding', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteDataAttributeBindingRequest(proto.Message): - r"""Delete DataAttributeBinding request. - - Attributes: - name (str): - Required. The resource name of the DataAttributeBinding: - projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id} - etag (str): - Required. If the client provided etag value - does not match the current etag value, the - DeleteDataAttributeBinding method returns - an ABORTED error response. Etags must be used - when calling the DeleteDataAttributeBinding - method. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py deleted file mode 100644 index 4994b1db31a0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans.py +++ /dev/null @@ -1,931 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import data_discovery -from google.cloud.dataplex_v1.types import data_profile -from google.cloud.dataplex_v1.types import data_quality -from google.cloud.dataplex_v1.types import processing -from google.cloud.dataplex_v1.types import resources -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DataScanType', - 'CreateDataScanRequest', - 'UpdateDataScanRequest', - 'DeleteDataScanRequest', - 'GetDataScanRequest', - 'ListDataScansRequest', - 'ListDataScansResponse', - 'RunDataScanRequest', - 'RunDataScanResponse', - 'GetDataScanJobRequest', - 'ListDataScanJobsRequest', - 'ListDataScanJobsResponse', - 'GenerateDataQualityRulesRequest', - 'GenerateDataQualityRulesResponse', - 'DataScan', - 'DataScanJob', - }, -) - - -class DataScanType(proto.Enum): - r"""The type of data scan. - - Values: - DATA_SCAN_TYPE_UNSPECIFIED (0): - The data scan type is unspecified. - DATA_QUALITY (1): - Data quality scan. - DATA_PROFILE (2): - Data profile scan. - DATA_DISCOVERY (3): - Data discovery scan. - """ - DATA_SCAN_TYPE_UNSPECIFIED = 0 - DATA_QUALITY = 1 - DATA_PROFILE = 2 - DATA_DISCOVERY = 3 - - -class CreateDataScanRequest(proto.Message): - r"""Create dataScan request. - - Attributes: - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a Google Cloud region. - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource. - data_scan_id (str): - Required. DataScan identifier. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - validate_only (bool): - Optional. Only validate the request, but do not perform - mutations. The default is ``false``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_scan: 'DataScan' = proto.Field( - proto.MESSAGE, - number=2, - message='DataScan', - ) - data_scan_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateDataScanRequest(proto.Message): - r"""Update dataScan request. - - Attributes: - data_scan (google.cloud.dataplex_v1.types.DataScan): - Required. DataScan resource to be updated. - - Only fields specified in ``update_mask`` are updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Mask of fields to update. - validate_only (bool): - Optional. Only validate the request, but do not perform - mutations. The default is ``false``. - """ - - data_scan: 'DataScan' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScan', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteDataScanRequest(proto.Message): - r"""Delete dataScan request. - - Attributes: - name (str): - Required. 
The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - force (bool): - Optional. If set to true, any child resources - of this data scan will also be deleted. - (Otherwise, the request will only work if the - data scan has no child resources.) - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class GetDataScanRequest(proto.Message): - r"""Get dataScan request. - - Attributes: - name (str): - Required. The resource name of the dataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): - Optional. Select the DataScan view to return. Defaults to - ``BASIC``. - """ - class DataScanView(proto.Enum): - r"""DataScan view options. - - Values: - DATA_SCAN_VIEW_UNSPECIFIED (0): - The API will default to the ``BASIC`` view. - BASIC (1): - Basic view that does not include *spec* and *result*. - FULL (10): - Include everything. - """ - DATA_SCAN_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 10 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: DataScanView = proto.Field( - proto.ENUM, - number=2, - enum=DataScanView, - ) - - -class ListDataScansRequest(proto.Message): - r"""List dataScans request. - - Attributes: - parent (str): - Required. The resource name of the parent location: - ``projects/{project}/locations/{location_id}`` where - ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of dataScans to - return. The service may return fewer than this - value. If unspecified, at most 500 scans will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataScans`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataScans`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields (``name`` or ``create_time``) for - the result. If not specified, the ordering is undefined. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataScansResponse(proto.Message): - r"""List dataScans response. - - Attributes: - data_scans (MutableSequence[google.cloud.dataplex_v1.types.DataScan]): - DataScans (``BASIC`` view only) under the given parent - location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
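-
-        Example (editorial sketch; the pager returned by
-        ``DataScanServiceClient.list_data_scans`` consumes this token
-        automatically, and the parent value is illustrative)::
-
-            client = DataScanServiceClient()
-            for scan in client.list_data_scans(parent="projects/p/locations/us-central1"):
-                print(scan.name, scan.type_)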
- """ - - @property - def raw_page(self): - return self - - data_scans: MutableSequence['DataScan'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataScan', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class RunDataScanRequest(proto.Message): - r"""Run DataScan Request - - Attributes: - name (str): - Required. The resource name of the DataScan: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - - Only **OnDemand** data scans are allowed. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RunDataScanResponse(proto.Message): - r"""Run DataScan Response. - - Attributes: - job (google.cloud.dataplex_v1.types.DataScanJob): - DataScanJob created by RunDataScan request. - """ - - job: 'DataScanJob' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScanJob', - ) - - -class GetDataScanJobRequest(proto.Message): - r"""Get DataScanJob request. - - Attributes: - name (str): - Required. The resource name of the DataScanJob: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView): - Optional. Select the DataScanJob view to return. Defaults to - ``BASIC``. - """ - class DataScanJobView(proto.Enum): - r"""DataScanJob view options. - - Values: - DATA_SCAN_JOB_VIEW_UNSPECIFIED (0): - The API will default to the ``BASIC`` view. - BASIC (1): - Basic view that does not include *spec* and *result*. - FULL (10): - Include everything. - """ - DATA_SCAN_JOB_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 10 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: DataScanJobView = proto.Field( - proto.ENUM, - number=2, - enum=DataScanJobView, - ) - - -class ListDataScanJobsRequest(proto.Message): - r"""List DataScanJobs request. - - Attributes: - parent (str): - Required. The resource name of the parent environment: - ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - page_size (int): - Optional. Maximum number of DataScanJobs to - return. The service may return fewer than this - value. If unspecified, at most 10 DataScanJobs - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListDataScanJobs`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListDataScanJobs`` must match the call that - provided the page token. - filter (str): - Optional. An expression for filtering the results of the - ListDataScanJobs request. - - If unspecified, all datascan jobs will be returned. Multiple - filters can be applied (with ``AND``, ``OR`` logical - operators). Filters are case-sensitive. - - Allowed fields are: - - - ``start_time`` - - ``end_time`` - - ``start_time`` and ``end_time`` expect RFC-3339 formatted - strings (e.g. 2018-10-08T18:30:00-07:00). 
- - For instance, 'start_time > 2018-10-08T00:00:00.123456789Z - AND end_time < 2018-10-09T00:00:00.123456789Z' limits - results to DataScanJobs between specified start and end - times. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDataScanJobsResponse(proto.Message): - r"""List DataScanJobs response. - - Attributes: - data_scan_jobs (MutableSequence[google.cloud.dataplex_v1.types.DataScanJob]): - DataScanJobs (``BASIC`` view only) under a given dataScan. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - data_scan_jobs: MutableSequence['DataScanJob'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataScanJob', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GenerateDataQualityRulesRequest(proto.Message): - r"""Request details for generating data quality rule - recommendations. - - Attributes: - name (str): - Required. The name must be one of the following: - - - The name of a data scan with at least one successful, - completed data profiling job - - The name of a successful, completed data profiling job (a - data scan job where the job type is data profiling) - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GenerateDataQualityRulesResponse(proto.Message): - r"""Response details for data quality rule recommendations. - - Attributes: - rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): - The data quality rules that Dataplex - Universal Catalog generates based on the results - of a data profiling scan. - """ - - rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=data_quality.DataQualityRule, - ) - - -class DataScan(proto.Message): - r"""Represents a user-visible job which provides the insights for the - related data source. - - For example: - - - Data quality: generates queries based on the rules and runs - against the data to get data quality check results. For more - information, see `Auto data quality - overview `__. - - Data profile: analyzes the data in tables and generates insights - about the structure, content and relationships (such as null - percent, cardinality, min/max/mean, etc). For more information, - see `About data - profiling `__. - - Data discovery: scans data in Cloud Storage buckets to extract and - then catalog metadata. For more information, see `Discover and - catalog Cloud Storage - data `__. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. The relative resource name of the - scan, of the form: - ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - uid (str): - Output only. System generated globally unique - ID for the scan. 
This ID will be different if - the scan is deleted and re-created with the same - name. - description (str): - Optional. Description of the scan. - - - Must be between 1-1024 characters. - display_name (str): - Optional. User friendly display name. - - - Must be between 1-256 characters. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the scan. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the DataScan. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the scan was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the scan was last - updated. - data (google.cloud.dataplex_v1.types.DataSource): - Required. The data source for DataScan. - execution_spec (google.cloud.dataplex_v1.types.DataScan.ExecutionSpec): - Optional. DataScan execution settings. - - If not specified, the fields in it will use - their default values. - execution_status (google.cloud.dataplex_v1.types.DataScan.ExecutionStatus): - Output only. Status of the data scan - execution. - type_ (google.cloud.dataplex_v1.types.DataScanType): - Output only. The type of DataScan. - data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Settings for a data quality scan. - - This field is a member of `oneof`_ ``spec``. - data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Settings for a data profile scan. - - This field is a member of `oneof`_ ``spec``. - data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): - Settings for a data discovery scan. - - This field is a member of `oneof`_ ``spec``. - data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of a data quality - scan. - - This field is a member of `oneof`_ ``result``. - data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of a data profile - scan. - - This field is a member of `oneof`_ ``result``. - data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): - Output only. The result of a data discovery - scan. - - This field is a member of `oneof`_ ``result``. - """ - - class ExecutionSpec(proto.Message): - r"""DataScan execution settings. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - trigger (google.cloud.dataplex_v1.types.Trigger): - Optional. Spec related to how often and when a scan should - be triggered. - - If not specified, the default is ``OnDemand``, which means - the scan will not run until the user calls ``RunDataScan`` - API. - field (str): - Immutable. The unnested field (of type *Date* or - *Timestamp*) that contains values which monotonically - increase over time. - - If not specified, a data scan will run for all data in the - table. - - This field is a member of `oneof`_ ``incremental``. - """ - - trigger: processing.Trigger = proto.Field( - proto.MESSAGE, - number=1, - message=processing.Trigger, - ) - field: str = proto.Field( - proto.STRING, - number=100, - oneof='incremental', - ) - - class ExecutionStatus(proto.Message): - r"""Status of the data scan execution. - - Attributes: - latest_job_start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the latest - DataScanJob started. - latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the latest - DataScanJob ended. 
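To illustrate the ExecutionSpec oneof above, a minimal sketch that creates an incremental profile scan (resource names and the timestamp column are placeholders; trigger is omitted so the documented OnDemand default applies):

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
scan = dataplex_v1.DataScan(
    data=dataplex_v1.DataSource(
        resource="//bigquery.googleapis.com/projects/my-project/datasets/d/tables/t",
    ),
    data_profile_spec=dataplex_v1.DataProfileSpec(),
    execution_spec=dataplex_v1.DataScan.ExecutionSpec(
        field="event_timestamp",  # monotonically increasing Date/Timestamp column
    ),
)
operation = client.create_data_scan(
    request=dataplex_v1.CreateDataScanRequest(
        parent="projects/my-project/locations/us-central1",
        data_scan=scan,
        data_scan_id="my-scan",
    )
)
print(operation.result().name)  # CreateDataScan is a long-running operation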
- latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when the DataScanJob - execution was created. - """ - - latest_job_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - latest_job_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - latest_job_create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - state: resources.State = proto.Field( - proto.ENUM, - number=6, - enum=resources.State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - data: processing.DataSource = proto.Field( - proto.MESSAGE, - number=9, - message=processing.DataSource, - ) - execution_spec: ExecutionSpec = proto.Field( - proto.MESSAGE, - number=10, - message=ExecutionSpec, - ) - execution_status: ExecutionStatus = proto.Field( - proto.MESSAGE, - number=11, - message=ExecutionStatus, - ) - type_: 'DataScanType' = proto.Field( - proto.ENUM, - number=12, - enum='DataScanType', - ) - data_quality_spec: data_quality.DataQualitySpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=data_quality.DataQualitySpec, - ) - data_profile_spec: data_profile.DataProfileSpec = proto.Field( - proto.MESSAGE, - number=101, - oneof='spec', - message=data_profile.DataProfileSpec, - ) - data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( - proto.MESSAGE, - number=102, - oneof='spec', - message=data_discovery.DataDiscoverySpec, - ) - data_quality_result: data_quality.DataQualityResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=data_quality.DataQualityResult, - ) - data_profile_result: data_profile.DataProfileResult = proto.Field( - proto.MESSAGE, - number=201, - oneof='result', - message=data_profile.DataProfileResult, - ) - data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( - proto.MESSAGE, - number=202, - oneof='result', - message=data_discovery.DataDiscoveryResult, - ) - - -class DataScanJob(proto.Message): - r"""A DataScanJob represents an instance of DataScan execution. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. The relative resource name of the - DataScanJob, of the form: - ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, - where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a Google - Cloud region. - uid (str): - Output only. System generated globally unique - ID for the DataScanJob. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time when the DataScanJob - was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataScanJob - was started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the DataScanJob - ended. - state (google.cloud.dataplex_v1.types.DataScanJob.State): - Output only. Execution state for the - DataScanJob. - message (str): - Output only. Additional information about the - current state. - type_ (google.cloud.dataplex_v1.types.DataScanType): - Output only. The type of the parent DataScan. - data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Output only. Settings for a data quality - scan. - - This field is a member of `oneof`_ ``spec``. - data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Output only. Settings for a data profile - scan. - - This field is a member of `oneof`_ ``spec``. - data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): - Output only. Settings for a data discovery - scan. - - This field is a member of `oneof`_ ``spec``. - data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of a data quality - scan. - - This field is a member of `oneof`_ ``result``. - data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of a data profile - scan. - - This field is a member of `oneof`_ ``result``. - data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): - Output only. The result of a data discovery - scan. - - This field is a member of `oneof`_ ``result``. - """ - class State(proto.Enum): - r"""Execution state for the DataScanJob. - - Values: - STATE_UNSPECIFIED (0): - The DataScanJob state is unspecified. - RUNNING (1): - The DataScanJob is running. - CANCELING (2): - The DataScanJob is canceling. - CANCELLED (3): - The DataScanJob cancellation was successful. - SUCCEEDED (4): - The DataScanJob completed successfully. - FAILED (5): - The DataScanJob is no longer running due to - an error. - PENDING (7): - The DataScanJob has been created but not - started to run yet. 
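Given the state machine above, a small polling sketch (the job name is a placeholder; SUCCEEDED, FAILED, and CANCELLED are the terminal states listed in the enum):

import time

from google.cloud import dataplex_v1

TERMINAL = {
    dataplex_v1.DataScanJob.State.SUCCEEDED,
    dataplex_v1.DataScanJob.State.FAILED,
    dataplex_v1.DataScanJob.State.CANCELLED,
}

client = dataplex_v1.DataScanServiceClient()
name = "projects/my-project/locations/us-central1/dataScans/my-scan/jobs/my-job"

while True:
    job = client.get_data_scan_job(
        request=dataplex_v1.GetDataScanJobRequest(
            name=name,
            view=dataplex_v1.GetDataScanJobRequest.DataScanJobView.FULL,
        )
    )
    if job.state in TERMINAL:
        break
    time.sleep(30)
print(job.state, job.message)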
- """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - CANCELING = 2 - CANCELLED = 3 - SUCCEEDED = 4 - FAILED = 5 - PENDING = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - message: str = proto.Field( - proto.STRING, - number=6, - ) - type_: 'DataScanType' = proto.Field( - proto.ENUM, - number=7, - enum='DataScanType', - ) - data_quality_spec: data_quality.DataQualitySpec = proto.Field( - proto.MESSAGE, - number=100, - oneof='spec', - message=data_quality.DataQualitySpec, - ) - data_profile_spec: data_profile.DataProfileSpec = proto.Field( - proto.MESSAGE, - number=101, - oneof='spec', - message=data_profile.DataProfileSpec, - ) - data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( - proto.MESSAGE, - number=102, - oneof='spec', - message=data_discovery.DataDiscoverySpec, - ) - data_quality_result: data_quality.DataQualityResult = proto.Field( - proto.MESSAGE, - number=200, - oneof='result', - message=data_quality.DataQualityResult, - ) - data_profile_result: data_profile.DataProfileResult = proto.Field( - proto.MESSAGE, - number=201, - oneof='result', - message=data_profile.DataProfileResult, - ) - data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( - proto.MESSAGE, - number=202, - oneof='result', - message=data_discovery.DataDiscoveryResult, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py deleted file mode 100644 index 4eccf1917483..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/logs.py +++ /dev/null @@ -1,1467 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import datascans_common -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'DiscoveryEvent', - 'JobEvent', - 'SessionEvent', - 'GovernanceEvent', - 'DataScanEvent', - 'DataQualityScanRuleResult', - 'BusinessGlossaryEvent', - 'EntryLinkEvent', - }, -) - - -class DiscoveryEvent(proto.Message): - r"""The payload associated with Discovery data processing. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - lake_id (str): - The id of the associated lake. - zone_id (str): - The id of the associated zone. - asset_id (str): - The id of the associated asset. - data_location (str): - The data location associated with the event. - datascan_id (str): - The id of the associated datascan for - standalone discovery. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType): - The type of the event being logged. - config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails): - Details about discovery configuration in - effect. - - This field is a member of `oneof`_ ``details``. - entity (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityDetails): - Details about the entity associated with the - event. - - This field is a member of `oneof`_ ``details``. - partition (google.cloud.dataplex_v1.types.DiscoveryEvent.PartitionDetails): - Details about the partition associated with - the event. - - This field is a member of `oneof`_ ``details``. - action (google.cloud.dataplex_v1.types.DiscoveryEvent.ActionDetails): - Details about the action associated with the - event. - - This field is a member of `oneof`_ ``details``. - table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails): - Details about the BigQuery table publishing - associated with the event. - - This field is a member of `oneof`_ ``details``. - """ - class EventType(proto.Enum): - r"""The type of the event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - CONFIG (1): - An event representing discovery configuration - in effect. - ENTITY_CREATED (2): - An event representing a metadata entity being - created. - ENTITY_UPDATED (3): - An event representing a metadata entity being - updated. - ENTITY_DELETED (4): - An event representing a metadata entity being - deleted. - PARTITION_CREATED (5): - An event representing a partition being - created. - PARTITION_UPDATED (6): - An event representing a partition being - updated. - PARTITION_DELETED (7): - An event representing a partition being - deleted. - TABLE_PUBLISHED (10): - An event representing a table being - published. - TABLE_UPDATED (11): - An event representing a table being updated. - TABLE_IGNORED (12): - An event representing a table being skipped - in publishing. - TABLE_DELETED (13): - An event representing a table being deleted. - """ - EVENT_TYPE_UNSPECIFIED = 0 - CONFIG = 1 - ENTITY_CREATED = 2 - ENTITY_UPDATED = 3 - ENTITY_DELETED = 4 - PARTITION_CREATED = 5 - PARTITION_UPDATED = 6 - PARTITION_DELETED = 7 - TABLE_PUBLISHED = 10 - TABLE_UPDATED = 11 - TABLE_IGNORED = 12 - TABLE_DELETED = 13 - - class EntityType(proto.Enum): - r"""The type of the entity. - - Values: - ENTITY_TYPE_UNSPECIFIED (0): - An unspecified event type. - TABLE (1): - Entities representing structured data. - FILESET (2): - Entities representing unstructured data. - """ - ENTITY_TYPE_UNSPECIFIED = 0 - TABLE = 1 - FILESET = 2 - - class TableType(proto.Enum): - r"""The type of the published table. - - Values: - TABLE_TYPE_UNSPECIFIED (0): - An unspecified table type. - EXTERNAL_TABLE (1): - External table type. - BIGLAKE_TABLE (2): - BigLake table type. - OBJECT_TABLE (3): - Object table type for unstructured data. 
- """ - TABLE_TYPE_UNSPECIFIED = 0 - EXTERNAL_TABLE = 1 - BIGLAKE_TABLE = 2 - OBJECT_TABLE = 3 - - class ConfigDetails(proto.Message): - r"""Details about configuration events. - - Attributes: - parameters (MutableMapping[str, str]): - A list of discovery configuration parameters - in effect. The keys are the field paths within - DiscoverySpec. Eg. includePatterns, - excludePatterns, - csvOptions.disableTypeInference, etc. - """ - - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - class EntityDetails(proto.Message): - r"""Details about the entity. - - Attributes: - entity (str): - The name of the entity resource. - The name is the fully-qualified resource name. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): - The type of the entity resource. - """ - - entity: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DiscoveryEvent.EntityType' = proto.Field( - proto.ENUM, - number=2, - enum='DiscoveryEvent.EntityType', - ) - - class TableDetails(proto.Message): - r"""Details about the published table. - - Attributes: - table (str): - The fully-qualified resource name of the - table resource. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType): - The type of the table resource. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DiscoveryEvent.TableType' = proto.Field( - proto.ENUM, - number=2, - enum='DiscoveryEvent.TableType', - ) - - class PartitionDetails(proto.Message): - r"""Details about the partition. - - Attributes: - partition (str): - The name to the partition resource. - The name is the fully-qualified resource name. - entity (str): - The name to the containing entity resource. - The name is the fully-qualified resource name. - type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EntityType): - The type of the containing entity resource. - sampled_data_locations (MutableSequence[str]): - The locations of the data items (e.g., a - Cloud Storage objects) sampled for metadata - inference. - """ - - partition: str = proto.Field( - proto.STRING, - number=1, - ) - entity: str = proto.Field( - proto.STRING, - number=2, - ) - type_: 'DiscoveryEvent.EntityType' = proto.Field( - proto.ENUM, - number=3, - enum='DiscoveryEvent.EntityType', - ) - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - class ActionDetails(proto.Message): - r"""Details about the action. - - Attributes: - type_ (str): - The type of action. - Eg. IncompatibleDataSchema, InvalidDataFormat - issue (str): - The human readable issue associated with the - action. 
- """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - issue: str = proto.Field( - proto.STRING, - number=2, - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - lake_id: str = proto.Field( - proto.STRING, - number=2, - ) - zone_id: str = proto.Field( - proto.STRING, - number=3, - ) - asset_id: str = proto.Field( - proto.STRING, - number=4, - ) - data_location: str = proto.Field( - proto.STRING, - number=5, - ) - datascan_id: str = proto.Field( - proto.STRING, - number=6, - ) - type_: EventType = proto.Field( - proto.ENUM, - number=10, - enum=EventType, - ) - config: ConfigDetails = proto.Field( - proto.MESSAGE, - number=20, - oneof='details', - message=ConfigDetails, - ) - entity: EntityDetails = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message=EntityDetails, - ) - partition: PartitionDetails = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message=PartitionDetails, - ) - action: ActionDetails = proto.Field( - proto.MESSAGE, - number=23, - oneof='details', - message=ActionDetails, - ) - table: TableDetails = proto.Field( - proto.MESSAGE, - number=24, - oneof='details', - message=TableDetails, - ) - - -class JobEvent(proto.Message): - r"""The payload associated with Job logs that contains events - describing jobs that have run within a Lake. - - Attributes: - message (str): - The log message. - job_id (str): - The unique id identifying the job. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the job started running. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the job ended running. - state (google.cloud.dataplex_v1.types.JobEvent.State): - The job state on completion. - retries (int): - The number of retries. - type_ (google.cloud.dataplex_v1.types.JobEvent.Type): - The type of the job. - service (google.cloud.dataplex_v1.types.JobEvent.Service): - The service used to execute the job. - service_job (str): - The reference to the job within the service. - execution_trigger (google.cloud.dataplex_v1.types.JobEvent.ExecutionTrigger): - Job execution trigger. - """ - class Type(proto.Enum): - r"""The type of the job. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified job type. - SPARK (1): - Spark jobs. - NOTEBOOK (2): - Notebook jobs. - """ - TYPE_UNSPECIFIED = 0 - SPARK = 1 - NOTEBOOK = 2 - - class State(proto.Enum): - r"""The completion status of the job. - - Values: - STATE_UNSPECIFIED (0): - Unspecified job state. - SUCCEEDED (1): - Job successfully completed. - FAILED (2): - Job was unsuccessful. - CANCELLED (3): - Job was cancelled by the user. - ABORTED (4): - Job was cancelled or aborted via the service - executing the job. - """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - CANCELLED = 3 - ABORTED = 4 - - class Service(proto.Enum): - r"""The service used to execute the job. - - Values: - SERVICE_UNSPECIFIED (0): - Unspecified service. - DATAPROC (1): - Cloud Dataproc. - """ - SERVICE_UNSPECIFIED = 0 - DATAPROC = 1 - - class ExecutionTrigger(proto.Enum): - r"""Job Execution trigger. - - Values: - EXECUTION_TRIGGER_UNSPECIFIED (0): - The job execution trigger is unspecified. - TASK_CONFIG (1): - The job was triggered by Dataplex Universal - Catalog based on trigger spec from task - definition. - RUN_REQUEST (2): - The job was triggered by the explicit call of - Task API. 
- """ - EXECUTION_TRIGGER_UNSPECIFIED = 0 - TASK_CONFIG = 1 - RUN_REQUEST = 2 - - message: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - retries: int = proto.Field( - proto.INT32, - number=6, - ) - type_: Type = proto.Field( - proto.ENUM, - number=7, - enum=Type, - ) - service: Service = proto.Field( - proto.ENUM, - number=8, - enum=Service, - ) - service_job: str = proto.Field( - proto.STRING, - number=9, - ) - execution_trigger: ExecutionTrigger = proto.Field( - proto.ENUM, - number=11, - enum=ExecutionTrigger, - ) - - -class SessionEvent(proto.Message): - r"""These messages contain information about sessions within an - environment. The monitored resource is 'Environment'. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - user_id (str): - The information about the user that created - the session. It will be the email address of the - user. - session_id (str): - Unique identifier for the session. - type_ (google.cloud.dataplex_v1.types.SessionEvent.EventType): - The type of the event. - query (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail): - The execution details of the query. - - This field is a member of `oneof`_ ``detail``. - event_succeeded (bool): - The status of the event. - fast_startup_enabled (bool): - If the session is associated with an - environment with fast startup enabled, and was - created before being assigned to a user. - unassigned_duration (google.protobuf.duration_pb2.Duration): - The idle duration of a warm pooled session - before it is assigned to user. - """ - class EventType(proto.Enum): - r"""The type of the event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - START (1): - Event when the session is assigned to a user. - STOP (2): - Event for stop of a session. - QUERY (3): - Query events in the session. - CREATE (4): - Event for creation of a cluster. It is not - yet assigned to a user. This comes before START - in the sequence - """ - EVENT_TYPE_UNSPECIFIED = 0 - START = 1 - STOP = 2 - QUERY = 3 - CREATE = 4 - - class QueryDetail(proto.Message): - r"""Execution details of the query. - - Attributes: - query_id (str): - The unique Query id identifying the query. - query_text (str): - The query text executed. - engine (google.cloud.dataplex_v1.types.SessionEvent.QueryDetail.Engine): - Query Execution engine. - duration (google.protobuf.duration_pb2.Duration): - Time taken for execution of the query. - result_size_bytes (int): - The size of results the query produced. - data_processed_bytes (int): - The data processed by the query. - """ - class Engine(proto.Enum): - r"""Query Execution engine. - - Values: - ENGINE_UNSPECIFIED (0): - An unspecified Engine type. - SPARK_SQL (1): - Spark-sql engine is specified in Query. - BIGQUERY (2): - BigQuery engine is specified in Query. 
- """ - ENGINE_UNSPECIFIED = 0 - SPARK_SQL = 1 - BIGQUERY = 2 - - query_id: str = proto.Field( - proto.STRING, - number=1, - ) - query_text: str = proto.Field( - proto.STRING, - number=2, - ) - engine: 'SessionEvent.QueryDetail.Engine' = proto.Field( - proto.ENUM, - number=3, - enum='SessionEvent.QueryDetail.Engine', - ) - duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - result_size_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - data_processed_bytes: int = proto.Field( - proto.INT64, - number=6, - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: str = proto.Field( - proto.STRING, - number=2, - ) - session_id: str = proto.Field( - proto.STRING, - number=3, - ) - type_: EventType = proto.Field( - proto.ENUM, - number=4, - enum=EventType, - ) - query: QueryDetail = proto.Field( - proto.MESSAGE, - number=5, - oneof='detail', - message=QueryDetail, - ) - event_succeeded: bool = proto.Field( - proto.BOOL, - number=6, - ) - fast_startup_enabled: bool = proto.Field( - proto.BOOL, - number=7, - ) - unassigned_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - - -class GovernanceEvent(proto.Message): - r"""Payload associated with Governance related log events. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - message (str): - The log message. - event_type (google.cloud.dataplex_v1.types.GovernanceEvent.EventType): - The type of the event. - entity (google.cloud.dataplex_v1.types.GovernanceEvent.Entity): - Entity resource information if the log event - is associated with a specific entity. - - This field is a member of `oneof`_ ``_entity``. - """ - class EventType(proto.Enum): - r"""Type of governance log event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - RESOURCE_IAM_POLICY_UPDATE (1): - Resource IAM policy update event. - BIGQUERY_TABLE_CREATE (2): - BigQuery table create event. - BIGQUERY_TABLE_UPDATE (3): - BigQuery table update event. - BIGQUERY_TABLE_DELETE (4): - BigQuery table delete event. - BIGQUERY_CONNECTION_CREATE (5): - BigQuery connection create event. - BIGQUERY_CONNECTION_UPDATE (6): - BigQuery connection update event. - BIGQUERY_CONNECTION_DELETE (7): - BigQuery connection delete event. - BIGQUERY_TAXONOMY_CREATE (10): - BigQuery taxonomy created. - BIGQUERY_POLICY_TAG_CREATE (11): - BigQuery policy tag created. - BIGQUERY_POLICY_TAG_DELETE (12): - BigQuery policy tag deleted. - BIGQUERY_POLICY_TAG_SET_IAM_POLICY (13): - BigQuery set iam policy for policy tag. - ACCESS_POLICY_UPDATE (14): - Access policy update event. - GOVERNANCE_RULE_MATCHED_RESOURCES (15): - Number of resources matched with particular - Query. - GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS (16): - Rule processing exceeds the allowed limit. - GOVERNANCE_RULE_ERRORS (17): - Rule processing errors. - GOVERNANCE_RULE_PROCESSING (18): - Governance rule processing Event. 
- """ - EVENT_TYPE_UNSPECIFIED = 0 - RESOURCE_IAM_POLICY_UPDATE = 1 - BIGQUERY_TABLE_CREATE = 2 - BIGQUERY_TABLE_UPDATE = 3 - BIGQUERY_TABLE_DELETE = 4 - BIGQUERY_CONNECTION_CREATE = 5 - BIGQUERY_CONNECTION_UPDATE = 6 - BIGQUERY_CONNECTION_DELETE = 7 - BIGQUERY_TAXONOMY_CREATE = 10 - BIGQUERY_POLICY_TAG_CREATE = 11 - BIGQUERY_POLICY_TAG_DELETE = 12 - BIGQUERY_POLICY_TAG_SET_IAM_POLICY = 13 - ACCESS_POLICY_UPDATE = 14 - GOVERNANCE_RULE_MATCHED_RESOURCES = 15 - GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS = 16 - GOVERNANCE_RULE_ERRORS = 17 - GOVERNANCE_RULE_PROCESSING = 18 - - class Entity(proto.Message): - r"""Information about Entity resource that the log event is - associated with. - - Attributes: - entity (str): - The Entity resource the log event is associated with. - Format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}`` - entity_type (google.cloud.dataplex_v1.types.GovernanceEvent.Entity.EntityType): - Type of entity. - """ - class EntityType(proto.Enum): - r"""Type of entity. - - Values: - ENTITY_TYPE_UNSPECIFIED (0): - An unspecified Entity type. - TABLE (1): - Table entity type. - FILESET (2): - Fileset entity type. - """ - ENTITY_TYPE_UNSPECIFIED = 0 - TABLE = 1 - FILESET = 2 - - entity: str = proto.Field( - proto.STRING, - number=1, - ) - entity_type: 'GovernanceEvent.Entity.EntityType' = proto.Field( - proto.ENUM, - number=2, - enum='GovernanceEvent.Entity.EntityType', - ) - - message: str = proto.Field( - proto.STRING, - number=1, - ) - event_type: EventType = proto.Field( - proto.ENUM, - number=2, - enum=EventType, - ) - entity: Entity = proto.Field( - proto.MESSAGE, - number=3, - optional=True, - message=Entity, - ) - - -class DataScanEvent(proto.Message): - r"""These messages contain information about the execution of a - datascan. The monitored resource is 'DataScan' - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - data_source (str): - The data source of the data scan - job_id (str): - The identifier of the specific data scan job - this log entry is for. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job started to - run. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the data scan job finished. - type_ (google.cloud.dataplex_v1.types.DataScanEvent.ScanType): - The type of the data scan. - state (google.cloud.dataplex_v1.types.DataScanEvent.State): - The status of the data scan job. - message (str): - The message describing the data scan job - event. - spec_version (str): - A version identifier of the spec which was - used to execute this job. - trigger (google.cloud.dataplex_v1.types.DataScanEvent.Trigger): - The trigger type of the data scan job. - scope (google.cloud.dataplex_v1.types.DataScanEvent.Scope): - The scope of the data scan (e.g. full, - incremental). - data_profile (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileResult): - Data profile result for data profile type - data scan. - - This field is a member of `oneof`_ ``result``. 
- data_quality (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityResult): - Data quality result for data quality type - data scan. - - This field is a member of `oneof`_ ``result``. - data_profile_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataProfileAppliedConfigs): - Applied configs for data profile type data - scan. - - This field is a member of `oneof`_ ``appliedConfigs``. - data_quality_configs (google.cloud.dataplex_v1.types.DataScanEvent.DataQualityAppliedConfigs): - Applied configs for data quality type data - scan. - - This field is a member of `oneof`_ ``appliedConfigs``. - post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult): - The result of post scan actions. - catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): - The status of publishing the data scan as - Dataplex Universal Catalog metadata. - """ - class ScanType(proto.Enum): - r"""The type of the data scan. - - Values: - SCAN_TYPE_UNSPECIFIED (0): - An unspecified data scan type. - DATA_PROFILE (1): - Data scan for data profile. - DATA_QUALITY (2): - Data scan for data quality. - DATA_DISCOVERY (4): - Data scan for data discovery. - """ - SCAN_TYPE_UNSPECIFIED = 0 - DATA_PROFILE = 1 - DATA_QUALITY = 2 - DATA_DISCOVERY = 4 - - class State(proto.Enum): - r"""The job state of the data scan. - - Values: - STATE_UNSPECIFIED (0): - Unspecified job state. - STARTED (1): - Data scan job started. - SUCCEEDED (2): - Data scan job successfully completed. - FAILED (3): - Data scan job was unsuccessful. - CANCELLED (4): - Data scan job was cancelled. - CREATED (5): - Data scan job was created. - """ - STATE_UNSPECIFIED = 0 - STARTED = 1 - SUCCEEDED = 2 - FAILED = 3 - CANCELLED = 4 - CREATED = 5 - - class Trigger(proto.Enum): - r"""The trigger type for the data scan. - - Values: - TRIGGER_UNSPECIFIED (0): - An unspecified trigger type. - ON_DEMAND (1): - Data scan triggers on demand. - SCHEDULE (2): - Data scan triggers as per schedule. - """ - TRIGGER_UNSPECIFIED = 0 - ON_DEMAND = 1 - SCHEDULE = 2 - - class Scope(proto.Enum): - r"""The scope of job for the data scan. - - Values: - SCOPE_UNSPECIFIED (0): - An unspecified scope type. - FULL (1): - Data scan runs on all of the data. - INCREMENTAL (2): - Data scan runs on incremental data. - """ - SCOPE_UNSPECIFIED = 0 - FULL = 1 - INCREMENTAL = 2 - - class DataProfileResult(proto.Message): - r"""Data profile result for data scan job. - - Attributes: - row_count (int): - The count of rows processed in the data scan - job. - """ - - row_count: int = proto.Field( - proto.INT64, - number=1, - ) - - class DataQualityResult(proto.Message): - r"""Data quality result for data scan job. - - Attributes: - row_count (int): - The count of rows processed in the data scan - job. - passed (bool): - Whether the data quality result was ``pass`` or not. - dimension_passed (MutableMapping[str, bool]): - The result of each dimension for data quality result. The - key of the map is the name of the dimension. The value is - the bool value depicting whether the dimension result was - ``pass`` or not. - score (float): - The table-level data quality score for the data scan job. - - The data quality score ranges between [0, 100] (up to two - decimal points). - dimension_score (MutableMapping[str, float]): - The score of each dimension for data quality result. The key - of the map is the name of the dimension. The value is the - data quality score for the dimension. 
- - The score ranges between [0, 100] (up to two decimal - points). - column_score (MutableMapping[str, float]): - The score of each column scanned in the data scan job. The - key of the map is the name of the column. The value is the - data quality score for the column. - - The score ranges between [0, 100] (up to two decimal - points). - """ - - row_count: int = proto.Field( - proto.INT64, - number=1, - ) - passed: bool = proto.Field( - proto.BOOL, - number=2, - ) - dimension_passed: MutableMapping[str, bool] = proto.MapField( - proto.STRING, - proto.BOOL, - number=3, - ) - score: float = proto.Field( - proto.FLOAT, - number=4, - ) - dimension_score: MutableMapping[str, float] = proto.MapField( - proto.STRING, - proto.FLOAT, - number=5, - ) - column_score: MutableMapping[str, float] = proto.MapField( - proto.STRING, - proto.FLOAT, - number=6, - ) - - class DataProfileAppliedConfigs(proto.Message): - r"""Applied configs for data profile type data scan job. - - Attributes: - sampling_percent (float): - The percentage of the records selected from the dataset for - DataScan. - - - Value ranges between 0.0 and 100.0. - - Value 0.0 or 100.0 imply that sampling was not applied. - row_filter_applied (bool): - Boolean indicating whether a row filter was - applied in the DataScan job. - column_filter_applied (bool): - Boolean indicating whether a column filter - was applied in the DataScan job. - """ - - sampling_percent: float = proto.Field( - proto.FLOAT, - number=1, - ) - row_filter_applied: bool = proto.Field( - proto.BOOL, - number=2, - ) - column_filter_applied: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class DataQualityAppliedConfigs(proto.Message): - r"""Applied configs for data quality type data scan job. - - Attributes: - sampling_percent (float): - The percentage of the records selected from the dataset for - DataScan. - - - Value ranges between 0.0 and 100.0. - - Value 0.0 or 100.0 imply that sampling was not applied. - row_filter_applied (bool): - Boolean indicating whether a row filter was - applied in the DataScan job. - """ - - sampling_percent: float = proto.Field( - proto.FLOAT, - number=1, - ) - row_filter_applied: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class PostScanActionsResult(proto.Message): - r"""Post scan actions result for data scan job. - - Attributes: - bigquery_export_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult): - The result of BigQuery export post scan - action. - """ - - class BigQueryExportResult(proto.Message): - r"""The result of BigQuery export post scan action. - - Attributes: - state (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult.BigQueryExportResult.State): - Execution state for the BigQuery exporting. - message (str): - Additional information about the BigQuery - exporting. - """ - class State(proto.Enum): - r"""Execution state for the exporting. - - Values: - STATE_UNSPECIFIED (0): - The exporting state is unspecified. - SUCCEEDED (1): - The exporting completed successfully. - FAILED (2): - The exporting is no longer running due to an - error. - SKIPPED (3): - The exporting is skipped due to no valid scan - result to export (usually caused by scan - failed). 
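A sketch of summarizing the BigQuery export status above (field paths follow the message definitions in this file):

from google.cloud import dataplex_v1

def export_summary(event: dataplex_v1.DataScanEvent) -> str:
    result = event.post_scan_actions_result.bigquery_export_result
    State = dataplex_v1.DataScanEvent.PostScanActionsResult.BigQueryExportResult.State
    if result.state == State.SKIPPED:
        return "export skipped: " + result.message
    return result.state.name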
- """ - STATE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - SKIPPED = 3 - - state: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult.State' = proto.Field( - proto.ENUM, - number=1, - enum='DataScanEvent.PostScanActionsResult.BigQueryExportResult.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - bigquery_export_result: 'DataScanEvent.PostScanActionsResult.BigQueryExportResult' = proto.Field( - proto.MESSAGE, - number=1, - message='DataScanEvent.PostScanActionsResult.BigQueryExportResult', - ) - - data_source: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - type_: ScanType = proto.Field( - proto.ENUM, - number=5, - enum=ScanType, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - message: str = proto.Field( - proto.STRING, - number=7, - ) - spec_version: str = proto.Field( - proto.STRING, - number=8, - ) - trigger: Trigger = proto.Field( - proto.ENUM, - number=9, - enum=Trigger, - ) - scope: Scope = proto.Field( - proto.ENUM, - number=10, - enum=Scope, - ) - data_profile: DataProfileResult = proto.Field( - proto.MESSAGE, - number=101, - oneof='result', - message=DataProfileResult, - ) - data_quality: DataQualityResult = proto.Field( - proto.MESSAGE, - number=102, - oneof='result', - message=DataQualityResult, - ) - data_profile_configs: DataProfileAppliedConfigs = proto.Field( - proto.MESSAGE, - number=201, - oneof='appliedConfigs', - message=DataProfileAppliedConfigs, - ) - data_quality_configs: DataQualityAppliedConfigs = proto.Field( - proto.MESSAGE, - number=202, - oneof='appliedConfigs', - message=DataQualityAppliedConfigs, - ) - post_scan_actions_result: PostScanActionsResult = proto.Field( - proto.MESSAGE, - number=11, - message=PostScanActionsResult, - ) - catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = proto.Field( - proto.MESSAGE, - number=13, - message=datascans_common.DataScanCatalogPublishingStatus, - ) - - -class DataQualityScanRuleResult(proto.Message): - r"""Information about the result of a data quality rule for data - quality scan. The monitored resource is 'DataScan'. - - Attributes: - job_id (str): - Identifier of the specific data scan job this - log entry is for. - data_source (str): - The data source of the data scan (e.g. - BigQuery table name). - column (str): - The column which this rule is evaluated - against. - rule_name (str): - The name of the data quality rule. - rule_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.RuleType): - The type of the data quality rule. - evalution_type (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.EvaluationType): - The evaluation type of the data quality rule. - rule_dimension (str): - The dimension of the data quality rule. - threshold_percent (float): - The passing threshold ([0.0, 100.0]) of the data quality - rule. - result (google.cloud.dataplex_v1.types.DataQualityScanRuleResult.Result): - The result of the data quality rule. - evaluated_row_count (int): - The number of rows evaluated against the data quality rule. - This field is only valid for rules of PER_ROW evaluation - type. 
- passed_row_count (int): - The number of rows which passed a rule evaluation. This - field is only valid for rules of PER_ROW evaluation type. - null_row_count (int): - The number of rows with null values in the - specified column. - assertion_row_count (int): - The number of rows returned by the SQL - statement in a SQL assertion rule. This field is - only valid for SQL assertion rules. - """ - class RuleType(proto.Enum): - r"""The type of the data quality rule. - - Values: - RULE_TYPE_UNSPECIFIED (0): - An unspecified rule type. - NON_NULL_EXPECTATION (1): - See - [DataQualityRule.NonNullExpectation][google.cloud.dataplex.v1.DataQualityRule.NonNullExpectation]. - RANGE_EXPECTATION (2): - See - [DataQualityRule.RangeExpectation][google.cloud.dataplex.v1.DataQualityRule.RangeExpectation]. - REGEX_EXPECTATION (3): - See - [DataQualityRule.RegexExpectation][google.cloud.dataplex.v1.DataQualityRule.RegexExpectation]. - ROW_CONDITION_EXPECTATION (4): - See - [DataQualityRule.RowConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectation]. - SET_EXPECTATION (5): - See - [DataQualityRule.SetExpectation][google.cloud.dataplex.v1.DataQualityRule.SetExpectation]. - STATISTIC_RANGE_EXPECTATION (6): - See - [DataQualityRule.StatisticRangeExpectation][google.cloud.dataplex.v1.DataQualityRule.StatisticRangeExpectation]. - TABLE_CONDITION_EXPECTATION (7): - See - [DataQualityRule.TableConditionExpectation][google.cloud.dataplex.v1.DataQualityRule.TableConditionExpectation]. - UNIQUENESS_EXPECTATION (8): - See - [DataQualityRule.UniquenessExpectation][google.cloud.dataplex.v1.DataQualityRule.UniquenessExpectation]. - SQL_ASSERTION (9): - See - [DataQualityRule.SqlAssertion][google.cloud.dataplex.v1.DataQualityRule.SqlAssertion]. - """ - RULE_TYPE_UNSPECIFIED = 0 - NON_NULL_EXPECTATION = 1 - RANGE_EXPECTATION = 2 - REGEX_EXPECTATION = 3 - ROW_CONDITION_EXPECTATION = 4 - SET_EXPECTATION = 5 - STATISTIC_RANGE_EXPECTATION = 6 - TABLE_CONDITION_EXPECTATION = 7 - UNIQUENESS_EXPECTATION = 8 - SQL_ASSERTION = 9 - - class EvaluationType(proto.Enum): - r"""The evaluation type of the data quality rule. - - Values: - EVALUATION_TYPE_UNSPECIFIED (0): - An unspecified evaluation type. - PER_ROW (1): - The rule evaluation is done at per row level. - AGGREGATE (2): - The rule evaluation is done for an aggregate - of rows. - """ - EVALUATION_TYPE_UNSPECIFIED = 0 - PER_ROW = 1 - AGGREGATE = 2 - - class Result(proto.Enum): - r"""Whether the data quality rule passed or failed. - - Values: - RESULT_UNSPECIFIED (0): - An unspecified result. - PASSED (1): - The data quality rule passed. - FAILED (2): - The data quality rule failed. 
- """ - RESULT_UNSPECIFIED = 0 - PASSED = 1 - FAILED = 2 - - job_id: str = proto.Field( - proto.STRING, - number=1, - ) - data_source: str = proto.Field( - proto.STRING, - number=2, - ) - column: str = proto.Field( - proto.STRING, - number=3, - ) - rule_name: str = proto.Field( - proto.STRING, - number=4, - ) - rule_type: RuleType = proto.Field( - proto.ENUM, - number=5, - enum=RuleType, - ) - evalution_type: EvaluationType = proto.Field( - proto.ENUM, - number=6, - enum=EvaluationType, - ) - rule_dimension: str = proto.Field( - proto.STRING, - number=7, - ) - threshold_percent: float = proto.Field( - proto.DOUBLE, - number=8, - ) - result: Result = proto.Field( - proto.ENUM, - number=9, - enum=Result, - ) - evaluated_row_count: int = proto.Field( - proto.INT64, - number=10, - ) - passed_row_count: int = proto.Field( - proto.INT64, - number=11, - ) - null_row_count: int = proto.Field( - proto.INT64, - number=12, - ) - assertion_row_count: int = proto.Field( - proto.INT64, - number=13, - ) - - -class BusinessGlossaryEvent(proto.Message): - r"""Payload associated with Business Glossary related log events. - - Attributes: - message (str): - The log message. - event_type (google.cloud.dataplex_v1.types.BusinessGlossaryEvent.EventType): - The type of the event. - resource (str): - Name of the resource. - """ - class EventType(proto.Enum): - r"""Type of glossary log event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - GLOSSARY_CREATE (1): - Glossary create event. - GLOSSARY_UPDATE (2): - Glossary update event. - GLOSSARY_DELETE (3): - Glossary delete event. - GLOSSARY_CATEGORY_CREATE (4): - Glossary category create event. - GLOSSARY_CATEGORY_UPDATE (5): - Glossary category update event. - GLOSSARY_CATEGORY_DELETE (6): - Glossary category delete event. - GLOSSARY_TERM_CREATE (7): - Glossary term create event. - GLOSSARY_TERM_UPDATE (8): - Glossary term update event. - GLOSSARY_TERM_DELETE (9): - Glossary term delete event. - """ - EVENT_TYPE_UNSPECIFIED = 0 - GLOSSARY_CREATE = 1 - GLOSSARY_UPDATE = 2 - GLOSSARY_DELETE = 3 - GLOSSARY_CATEGORY_CREATE = 4 - GLOSSARY_CATEGORY_UPDATE = 5 - GLOSSARY_CATEGORY_DELETE = 6 - GLOSSARY_TERM_CREATE = 7 - GLOSSARY_TERM_UPDATE = 8 - GLOSSARY_TERM_DELETE = 9 - - message: str = proto.Field( - proto.STRING, - number=1, - ) - event_type: EventType = proto.Field( - proto.ENUM, - number=2, - enum=EventType, - ) - resource: str = proto.Field( - proto.STRING, - number=3, - ) - - -class EntryLinkEvent(proto.Message): - r"""Payload associated with Entry related log events. - - Attributes: - message (str): - The log message. - event_type (google.cloud.dataplex_v1.types.EntryLinkEvent.EventType): - The type of the event. - resource (str): - Name of the resource. - """ - class EventType(proto.Enum): - r"""Type of entry link log event. - - Values: - EVENT_TYPE_UNSPECIFIED (0): - An unspecified event type. - ENTRY_LINK_CREATE (1): - EntryLink create event. - ENTRY_LINK_DELETE (2): - EntryLink delete event. 
- """ - EVENT_TYPE_UNSPECIFIED = 0 - ENTRY_LINK_CREATE = 1 - ENTRY_LINK_DELETE = 2 - - message: str = proto.Field( - proto.STRING, - number=1, - ) - event_type: EventType = proto.Field( - proto.ENUM, - number=2, - enum=EventType, - ) - resource: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py deleted file mode 100644 index 481b4e41a9d5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/metadata_.py +++ /dev/null @@ -1,1182 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'StorageSystem', - 'CreateEntityRequest', - 'UpdateEntityRequest', - 'DeleteEntityRequest', - 'ListEntitiesRequest', - 'ListEntitiesResponse', - 'GetEntityRequest', - 'ListPartitionsRequest', - 'CreatePartitionRequest', - 'DeletePartitionRequest', - 'ListPartitionsResponse', - 'GetPartitionRequest', - 'Entity', - 'Partition', - 'Schema', - 'StorageFormat', - 'StorageAccess', - }, -) - - -class StorageSystem(proto.Enum): - r"""Identifies the cloud system that manages the data storage. - - Values: - STORAGE_SYSTEM_UNSPECIFIED (0): - Storage system unspecified. - CLOUD_STORAGE (1): - The entity data is contained within a Cloud - Storage bucket. - BIGQUERY (2): - The entity data is contained within a - BigQuery dataset. - """ - STORAGE_SYSTEM_UNSPECIFIED = 0 - CLOUD_STORAGE = 1 - BIGQUERY = 2 - - -class CreateEntityRequest(proto.Message): - r"""Create a metadata entity request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - entity (google.cloud.dataplex_v1.types.Entity): - Required. Entity resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entity: 'Entity' = proto.Field( - proto.MESSAGE, - number=3, - message='Entity', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEntityRequest(proto.Message): - r"""Update a metadata entity request. - The exiting entity will be fully replaced by the entity in the - request. The entity ID is mutable. To modify the ID, use the - current entity ID in the request URL and specify the new ID in - the request body. - - Attributes: - entity (google.cloud.dataplex_v1.types.Entity): - Required. Update description. - validate_only (bool): - Optional. 
Only validate the request, but do - not perform mutations. The default is false. - """ - - entity: 'Entity' = proto.Field( - proto.MESSAGE, - number=2, - message='Entity', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEntityRequest(proto.Message): - r"""Delete a metadata entity request. - - Attributes: - name (str): - Required. The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - etag (str): - Required. The etag associated with the entity, which can be - retrieved with a [GetEntity][] request. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEntitiesRequest(proto.Message): - r"""List metadata entities request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - view (google.cloud.dataplex_v1.types.ListEntitiesRequest.EntityView): - Required. Specify the entity view to make a - partial list request. - page_size (int): - Optional. Maximum number of entities to - return. The service may return fewer than this - value. If unspecified, 100 entities will be - returned by default. The maximum value is 500; - larger values will will be truncated to 500. - page_token (str): - Optional. Page token received from a previous - ``ListEntities`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListEntities`` must match the call that - provided the page token. - filter (str): - Optional. The following filter parameters can be added to - the URL to limit the entities returned by the API: - - - Entity ID: ?filter="id=entityID" - - Asset ID: ?filter="asset=assetID" - - Data path ?filter="data_path=gs://my-bucket" - - Is HIVE compatible: ?filter="hive_compatible=true" - - Is BigQuery compatible: ?filter="bigquery_compatible=true". - """ - class EntityView(proto.Enum): - r"""Entity views. - - Values: - ENTITY_VIEW_UNSPECIFIED (0): - The default unset value. Return both table - and fileset entities if unspecified. - TABLES (1): - Only list table entities. - FILESETS (2): - Only list fileset entities. - """ - ENTITY_VIEW_UNSPECIFIED = 0 - TABLES = 1 - FILESETS = 2 - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - view: EntityView = proto.Field( - proto.ENUM, - number=2, - enum=EntityView, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEntitiesResponse(proto.Message): - r"""List metadata entities response. - - Attributes: - entities (MutableSequence[google.cloud.dataplex_v1.types.Entity]): - Entities in the specified parent zone. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no remaining results in - the list. - """ - - @property - def raw_page(self): - return self - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetEntityRequest(proto.Message): - r"""Get metadata entity request. - - Attributes: - name (str): - Required. 
The resource name of the entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}.`` - view (google.cloud.dataplex_v1.types.GetEntityRequest.EntityView): - Optional. Used to select the subset of entity information to - return. Defaults to ``BASIC``. - """ - class EntityView(proto.Enum): - r"""Entity views for get entity partial result. - - Values: - ENTITY_VIEW_UNSPECIFIED (0): - The API will default to the ``BASIC`` view. - BASIC (1): - Minimal view that does not include the - schema. - SCHEMA (2): - Include basic information and schema. - FULL (4): - Include everything. Currently, this is the - same as the SCHEMA view. - """ - ENTITY_VIEW_UNSPECIFIED = 0 - BASIC = 1 - SCHEMA = 2 - FULL = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: EntityView = proto.Field( - proto.ENUM, - number=2, - enum=EntityView, - ) - - -class ListPartitionsRequest(proto.Message): - r"""List metadata partitions request. - - Attributes: - parent (str): - Required. The resource name of the parent entity: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - page_size (int): - Optional. Maximum number of partitions to - return. The service may return fewer than this - value. If unspecified, 100 partitions will be - returned by default. The maximum page size is - 500; larger values will will be truncated to - 500. - page_token (str): - Optional. Page token received from a previous - ``ListPartitions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListPartitions`` must match the call that - provided the page token. - filter (str): - Optional. Filter the partitions returned to the caller using - a key value pair expression. Supported operators and syntax: - - - logic operators: AND, OR - - comparison operators: <, >, >=, <= ,=, != - - LIKE operators: - - - The right hand of a LIKE operator supports "." and "\*" - for wildcard searches, for example "value1 LIKE - ".\ *oo.*" - - - parenthetical grouping: ( ) - - Sample filter expression: \`?filter="key1 < value1 OR key2 > - value2" - - **Notes:** - - - Keys to the left of operators are case insensitive. - - Partition results are sorted first by creation time, then - by lexicographic order. - - Up to 20 key value filter pairs are allowed, but due to - performance considerations, only the first 10 will be used - as a filter. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CreatePartitionRequest(proto.Message): - r"""Create metadata partition request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - partition (google.cloud.dataplex_v1.types.Partition): - Required. Partition resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - partition: 'Partition' = proto.Field( - proto.MESSAGE, - number=3, - message='Partition', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class DeletePartitionRequest(proto.Message): - r"""Delete metadata partition request. 
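A usage sketch for the partition filter syntax documented above (the parent name is a placeholder; recall that only the first 10 key/value pairs are applied):

from google.cloud import dataplex_v1

client = dataplex_v1.MetadataServiceClient()
parent = (
    "projects/my-project/locations/us-central1/lakes/my-lake"
    "/zones/my-zone/entities/my-entity"
)
for partition in client.list_partitions(
    request=dataplex_v1.ListPartitionsRequest(
        parent=parent,
        filter="key1 < value1 OR key2 > value2",
    )
):
    print(partition.name)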
- - Attributes: - name (str): - Required. The resource name of the partition. Format: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an ordered - sequence of partition values separated by "/". All values - must be provided. - etag (str): - Optional. The etag associated with the - partition. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListPartitionsResponse(proto.Message): - r"""List metadata partitions response. - - Attributes: - partitions (MutableSequence[google.cloud.dataplex_v1.types.Partition]): - Partitions under the specified parent entity. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no remaining results in - the list. - """ - - @property - def raw_page(self): - return self - - partitions: MutableSequence['Partition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Partition', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetPartitionRequest(proto.Message): - r"""Get metadata partition request. - - Attributes: - name (str): - Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. - The {partition_value_path} segment consists of an ordered - sequence of partition values separated by "/". All values - must be provided. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Entity(proto.Message): - r"""Represents tables and fileset metadata contained within a - zone. - - Attributes: - name (str): - Output only. The resource name of the entity, of the form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{id}``. - display_name (str): - Optional. Display name must be shorter than - or equal to 256 characters. - description (str): - Optional. User friendly longer description - text. Must be shorter than or equal to 1024 - characters. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entity was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the entity was - last updated. - id (str): - Required. A user-provided entity ID. It is - mutable, and will be used as the published table - name. Specifying a new ID in an update entity - request will override the existing value. - The ID must contain only letters (a-z, A-Z), - numbers (0-9), and underscores, and consist of - 256 or fewer characters. - etag (str): - Optional. The etag associated with the entity, which can be - retrieved with a [GetEntity][] request. Required for update - and delete requests. - type_ (google.cloud.dataplex_v1.types.Entity.Type): - Required. Immutable. The type of entity. - asset (str): - Required. Immutable. The ID of the asset - associated with the storage location containing - the entity data. The entity must be within the - same zone as the asset. - data_path (str): - Required. Immutable. The storage path of the entity data. - For Cloud Storage data, this is the fully-qualified path to - the entity, such as ``gs://bucket/path/to/data``. For - BigQuery data, this is the name of the table resource, such - as - ``projects/project_id/datasets/dataset_id/tables/table_id``. 
- data_path_pattern (str): - Optional. The set of items within the data path constituting - the data in the entity, represented as a glob path. Example: - ``gs://bucket/path/to/data/**/*.csv``. - catalog_entry (str): - Output only. The name of the associated Data - Catalog entry. - system (google.cloud.dataplex_v1.types.StorageSystem): - Required. Immutable. Identifies the storage - system of the entity data. - format_ (google.cloud.dataplex_v1.types.StorageFormat): - Required. Identifies the storage format of - the entity data. It does not apply to entities - with data stored in BigQuery. - compatibility (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus): - Output only. Metadata stores that the entity - is compatible with. - access (google.cloud.dataplex_v1.types.StorageAccess): - Output only. Identifies the access mechanism - to the entity. Not user settable. - uid (str): - Output only. System generated unique ID for - the Entity. This ID will be different if the - Entity is deleted and re-created with the same - name. - schema (google.cloud.dataplex_v1.types.Schema): - Required. The description of the data structure and layout. - The schema is not included in list responses. It is only - included in ``SCHEMA`` and ``FULL`` entity views of a - ``GetEntity`` response. - """ - class Type(proto.Enum): - r"""The type of entity. - - Values: - TYPE_UNSPECIFIED (0): - Type unspecified. - TABLE (1): - Structured and semi-structured data. - FILESET (2): - Unstructured data. - """ - TYPE_UNSPECIFIED = 0 - TABLE = 1 - FILESET = 2 - - class CompatibilityStatus(proto.Message): - r"""Provides compatibility information for various metadata - stores. - - Attributes: - hive_metastore (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility): - Output only. Whether this entity is - compatible with Hive Metastore. - bigquery (google.cloud.dataplex_v1.types.Entity.CompatibilityStatus.Compatibility): - Output only. Whether this entity is - compatible with BigQuery. - """ - - class Compatibility(proto.Message): - r"""Provides compatibility information for a specific metadata - store. - - Attributes: - compatible (bool): - Output only. Whether the entity is compatible - and can be represented in the metadata store. - reason (str): - Output only. Provides additional detail if - the entity is incompatible with the metadata - store. 
- """ - - compatible: bool = proto.Field( - proto.BOOL, - number=1, - ) - reason: str = proto.Field( - proto.STRING, - number=2, - ) - - hive_metastore: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( - proto.MESSAGE, - number=1, - message='Entity.CompatibilityStatus.Compatibility', - ) - bigquery: 'Entity.CompatibilityStatus.Compatibility' = proto.Field( - proto.MESSAGE, - number=2, - message='Entity.CompatibilityStatus.Compatibility', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - id: str = proto.Field( - proto.STRING, - number=7, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - type_: Type = proto.Field( - proto.ENUM, - number=10, - enum=Type, - ) - asset: str = proto.Field( - proto.STRING, - number=11, - ) - data_path: str = proto.Field( - proto.STRING, - number=12, - ) - data_path_pattern: str = proto.Field( - proto.STRING, - number=13, - ) - catalog_entry: str = proto.Field( - proto.STRING, - number=14, - ) - system: 'StorageSystem' = proto.Field( - proto.ENUM, - number=15, - enum='StorageSystem', - ) - format_: 'StorageFormat' = proto.Field( - proto.MESSAGE, - number=16, - message='StorageFormat', - ) - compatibility: CompatibilityStatus = proto.Field( - proto.MESSAGE, - number=19, - message=CompatibilityStatus, - ) - access: 'StorageAccess' = proto.Field( - proto.MESSAGE, - number=21, - message='StorageAccess', - ) - uid: str = proto.Field( - proto.STRING, - number=22, - ) - schema: 'Schema' = proto.Field( - proto.MESSAGE, - number=50, - message='Schema', - ) - - -class Partition(proto.Message): - r"""Represents partition metadata contained within entity - instances. - - Attributes: - name (str): - Output only. Partition values used in the HTTP URL must be - double encoded. For example, - ``url_encode(url_encode(value))`` can be used to encode - "US:CA/CA#Sunnyvale so that the request URL ends with - "/partitions/US%253ACA/CA%2523Sunnyvale". The name field in - the response retains the encoded format. - values (MutableSequence[str]): - Required. Immutable. The set of values - representing the partition, which correspond to - the partition schema defined in the parent - entity. - location (str): - Required. Immutable. The location of the entity data within - the partition, for example, - ``gs://bucket/path/to/entity/key1=value1/key2=value2``. Or - ``projects//datasets//tables/`` - etag (str): - Optional. The etag for this partition. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - etag: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Schema(proto.Message): - r"""Schema information describing the structure and layout of the - data. - - Attributes: - user_managed (bool): - Required. Set to ``true`` if user-managed or ``false`` if - managed by Dataplex Universal Catalog. The default is - ``false`` (managed by Dataplex Universal Catalog). - - - Set to ``false``\ to enable Dataplex Universal Catalog - discovery to update the schema. 
including new data - discovery, schema inference, and schema evolution. Users - retain the ability to input and edit the schema. Dataplex - Universal Catalog treats schema input by the user as - though produced by a previous Dataplex Universal Catalog - discovery operation, and it will evolve the schema and - take action based on that treatment. - - - Set to ``true`` to fully manage the entity schema. This - setting guarantees that Dataplex Universal Catalog will - not change schema fields. - fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): - Optional. The sequence of fields describing data in table - entities. **Note:** BigQuery SchemaFields are immutable. - partition_fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.PartitionField]): - Optional. The sequence of fields describing - the partition structure in entities. If this - field is empty, there are no partitions within - the data. - partition_style (google.cloud.dataplex_v1.types.Schema.PartitionStyle): - Optional. The structure of paths containing - partition data within the entity. - """ - class Type(proto.Enum): - r"""Type information for fields in schemas and partition schemas. - - Values: - TYPE_UNSPECIFIED (0): - SchemaType unspecified. - BOOLEAN (1): - Boolean field. - BYTE (2): - Single byte numeric field. - INT16 (3): - 16-bit numeric field. - INT32 (4): - 32-bit numeric field. - INT64 (5): - 64-bit numeric field. - FLOAT (6): - Floating point numeric field. - DOUBLE (7): - Double precision numeric field. - DECIMAL (8): - Real value numeric field. - STRING (9): - Sequence of characters field. - BINARY (10): - Sequence of bytes field. - TIMESTAMP (11): - Date and time field. - DATE (12): - Date field. - TIME (13): - Time field. - RECORD (14): - Structured field. Nested fields that define - the structure of the map. If all nested fields - are nullable, this field represents a union. - NULL (100): - Null field that does not have values. - """ - TYPE_UNSPECIFIED = 0 - BOOLEAN = 1 - BYTE = 2 - INT16 = 3 - INT32 = 4 - INT64 = 5 - FLOAT = 6 - DOUBLE = 7 - DECIMAL = 8 - STRING = 9 - BINARY = 10 - TIMESTAMP = 11 - DATE = 12 - TIME = 13 - RECORD = 14 - NULL = 100 - - class Mode(proto.Enum): - r"""Additional qualifiers to define field semantics. - - Values: - MODE_UNSPECIFIED (0): - Mode unspecified. - REQUIRED (1): - The field has required semantics. - NULLABLE (2): - The field has optional semantics, and may be - null. - REPEATED (3): - The field has repeated (0 or more) semantics, - and is a list of values. - """ - MODE_UNSPECIFIED = 0 - REQUIRED = 1 - NULLABLE = 2 - REPEATED = 3 - - class PartitionStyle(proto.Enum): - r"""The structure of paths within the entity, which represent - partitions. - - Values: - PARTITION_STYLE_UNSPECIFIED (0): - PartitionStyle unspecified - HIVE_COMPATIBLE (1): - Partitions are hive-compatible. Examples: - ``gs://bucket/path/to/table/dt=2019-10-31/lang=en``, - ``gs://bucket/path/to/table/dt=2019-10-31/lang=en/late``. - """ - PARTITION_STYLE_UNSPECIFIED = 0 - HIVE_COMPATIBLE = 1 - - class SchemaField(proto.Message): - r"""Represents a column field within a table schema. - - Attributes: - name (str): - Required. The name of the field. Must contain - only letters, numbers and underscores, with a - maximum length of 767 characters, and must begin - with a letter or underscore. - description (str): - Optional. User friendly field description. - Must be less than or equal to 1024 characters. - type_ (google.cloud.dataplex_v1.types.Schema.Type): - Required. 
The type of field. - mode (google.cloud.dataplex_v1.types.Schema.Mode): - Required. Additional field semantics. - fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): - Optional. Any nested field for complex types. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - type_: 'Schema.Type' = proto.Field( - proto.ENUM, - number=3, - enum='Schema.Type', - ) - mode: 'Schema.Mode' = proto.Field( - proto.ENUM, - number=4, - enum='Schema.Mode', - ) - fields: MutableSequence['Schema.SchemaField'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='Schema.SchemaField', - ) - - class PartitionField(proto.Message): - r"""Represents a key field within the entity's partition structure. You - could have up to 20 partition fields, but only the first 10 - partitions can be used for filtering due to performance - considerations. **Note:** Partition fields are immutable. - - Attributes: - name (str): - Required. Partition field name must consist - of letters, numbers, and underscores only, with - a maximum length of 256 characters, and must - begin with a letter or underscore. - type_ (google.cloud.dataplex_v1.types.Schema.Type): - Required. Immutable. The type of field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'Schema.Type' = proto.Field( - proto.ENUM, - number=2, - enum='Schema.Type', - ) - - user_managed: bool = proto.Field( - proto.BOOL, - number=1, - ) - fields: MutableSequence[SchemaField] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=SchemaField, - ) - partition_fields: MutableSequence[PartitionField] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=PartitionField, - ) - partition_style: PartitionStyle = proto.Field( - proto.ENUM, - number=4, - enum=PartitionStyle, - ) - - -class StorageFormat(proto.Message): - r"""Describes the format of the data within its storage location. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - format_ (google.cloud.dataplex_v1.types.StorageFormat.Format): - Output only. The data format associated with - the stored data, which represents content type - values. The value is inferred from mime type. - compression_format (google.cloud.dataplex_v1.types.StorageFormat.CompressionFormat): - Optional. The compression type associated - with the stored data. If unspecified, the data - is uncompressed. - mime_type (str): - Required. The mime type descriptor for the - data. Must match the pattern {type}/{subtype}. - Supported values: - - - application/x-parquet - - application/x-avro - - application/x-orc - - application/x-tfrecord - - application/x-parquet+iceberg - - application/x-avro+iceberg - - application/x-orc+iceberg - - application/json - - application/{subtypes} - - text/csv - - text/ - - image/{image subtype} - - video/{video subtype} - - audio/{audio subtype} - csv (google.cloud.dataplex_v1.types.StorageFormat.CsvOptions): - Optional. Additional information about CSV - formatted data. - - This field is a member of `oneof`_ ``options``. - json (google.cloud.dataplex_v1.types.StorageFormat.JsonOptions): - Optional. Additional information about JSON - formatted data. 
- - This field is a member of `oneof`_ ``options``. - iceberg (google.cloud.dataplex_v1.types.StorageFormat.IcebergOptions): - Optional. Additional information about - iceberg tables. - - This field is a member of `oneof`_ ``options``. - """ - class Format(proto.Enum): - r"""The specific file format of the data. - - Values: - FORMAT_UNSPECIFIED (0): - Format unspecified. - PARQUET (1): - Parquet-formatted structured data. - AVRO (2): - Avro-formatted structured data. - ORC (3): - Orc-formatted structured data. - CSV (100): - Csv-formatted semi-structured data. - JSON (101): - Json-formatted semi-structured data. - IMAGE (200): - Image data formats (such as jpg and png). - AUDIO (201): - Audio data formats (such as mp3, and wav). - VIDEO (202): - Video data formats (such as mp4 and mpg). - TEXT (203): - Textual data formats (such as txt and xml). - TFRECORD (204): - TensorFlow record format. - OTHER (1000): - Data that doesn't match a specific format. - UNKNOWN (1001): - Data of an unknown format. - """ - FORMAT_UNSPECIFIED = 0 - PARQUET = 1 - AVRO = 2 - ORC = 3 - CSV = 100 - JSON = 101 - IMAGE = 200 - AUDIO = 201 - VIDEO = 202 - TEXT = 203 - TFRECORD = 204 - OTHER = 1000 - UNKNOWN = 1001 - - class CompressionFormat(proto.Enum): - r"""The specific compressed file format of the data. - - Values: - COMPRESSION_FORMAT_UNSPECIFIED (0): - CompressionFormat unspecified. Implies - uncompressed data. - GZIP (2): - GZip compressed set of files. - BZIP2 (3): - BZip2 compressed set of files. - """ - COMPRESSION_FORMAT_UNSPECIFIED = 0 - GZIP = 2 - BZIP2 = 3 - - class CsvOptions(proto.Message): - r"""Describes CSV and similar semi-structured data formats. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - Accepts "US-ASCII", "UTF-8", and "ISO-8859-1". - Defaults to UTF-8 if unspecified. - header_rows (int): - Optional. The number of rows to interpret as - header rows that should be skipped when reading - data rows. Defaults to 0. - delimiter (str): - Optional. The delimiter used to separate - values. Defaults to ','. - quote (str): - Optional. The character used to quote column - values. Accepts '"' (double quotation mark) or - ''' (single quotation mark). Defaults to '"' - (double quotation mark) if unspecified. - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - header_rows: int = proto.Field( - proto.INT32, - number=2, - ) - delimiter: str = proto.Field( - proto.STRING, - number=3, - ) - quote: str = proto.Field( - proto.STRING, - number=4, - ) - - class JsonOptions(proto.Message): - r"""Describes JSON data format. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - Accepts "US-ASCII", "UTF-8" and "ISO-8859-1". - Defaults to UTF-8 if not specified. - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - - class IcebergOptions(proto.Message): - r"""Describes Iceberg data format. - - Attributes: - metadata_location (str): - Optional. 
The location where the Iceberg - metadata is present. It must be within the table - path. - """ - - metadata_location: str = proto.Field( - proto.STRING, - number=1, - ) - - format_: Format = proto.Field( - proto.ENUM, - number=1, - enum=Format, - ) - compression_format: CompressionFormat = proto.Field( - proto.ENUM, - number=2, - enum=CompressionFormat, - ) - mime_type: str = proto.Field( - proto.STRING, - number=3, - ) - csv: CsvOptions = proto.Field( - proto.MESSAGE, - number=10, - oneof='options', - message=CsvOptions, - ) - json: JsonOptions = proto.Field( - proto.MESSAGE, - number=11, - oneof='options', - message=JsonOptions, - ) - iceberg: IcebergOptions = proto.Field( - proto.MESSAGE, - number=12, - oneof='options', - message=IcebergOptions, - ) - - -class StorageAccess(proto.Message): - r"""Describes the access mechanism of the data within its storage - location. - - Attributes: - read (google.cloud.dataplex_v1.types.StorageAccess.AccessMode): - Output only. Describes the read access - mechanism of the data. Not user settable. - """ - class AccessMode(proto.Enum): - r"""Access Mode determines how data stored within the Entity is - read. - - Values: - ACCESS_MODE_UNSPECIFIED (0): - Access mode unspecified. - DIRECT (1): - Default. Data is accessed directly using - storage APIs. - MANAGED (2): - Data is accessed through a managed interface - using BigQuery APIs. - """ - ACCESS_MODE_UNSPECIFIED = 0 - DIRECT = 1 - MANAGED = 2 - - read: AccessMode = proto.Field( - proto.ENUM, - number=21, - enum=AccessMode, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py deleted file mode 100644 index 0a6a6d6e6b8b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/processing.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'Trigger', - 'DataSource', - 'ScannedData', - }, -) - - -class Trigger(proto.Message): - r"""DataScan scheduling and trigger settings. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - on_demand (google.cloud.dataplex_v1.types.Trigger.OnDemand): - The scan runs once via ``RunDataScan`` API. - - This field is a member of `oneof`_ ``mode``. - schedule (google.cloud.dataplex_v1.types.Trigger.Schedule): - The scan is scheduled to run periodically. 
 - - This field is a member of `oneof`_ ``mode``. - """ - - class OnDemand(proto.Message): - r"""The scan runs once via ``RunDataScan`` API. - """ - - class Schedule(proto.Message): - r"""The scan is scheduled to run periodically. - - Attributes: - cron (str): - Required. `Cron <https://en.wikipedia.org/wiki/Cron>`__ - schedule for running scans periodically. - - To explicitly set a timezone in the cron tab, apply a prefix - in the cron tab: **"CRON_TZ=${IANA_TIME_ZONE}"** or - **"TZ=${IANA_TIME_ZONE}"**. The **${IANA_TIME_ZONE}** may - only be a valid string from IANA time zone database - (`wikipedia <https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>`__). - For example, ``CRON_TZ=America/New_York 1 * * * *``, or - ``TZ=America/New_York 1 * * * *``. - - This field is required for Schedule scans. - """ - - cron: str = proto.Field( - proto.STRING, - number=1, - ) - - on_demand: OnDemand = proto.Field( - proto.MESSAGE, - number=100, - oneof='mode', - message=OnDemand, - ) - schedule: Schedule = proto.Field( - proto.MESSAGE, - number=101, - oneof='mode', - message=Schedule, - ) - - -class DataSource(proto.Message): - r"""The data source for DataScan. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - entity (str): - Immutable. The Dataplex Universal Catalog entity that - represents the data source (e.g. BigQuery table) for - DataScan, of the form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. - - This field is a member of `oneof`_ ``source``. - resource (str): - Immutable. The service-qualified full resource name of the - cloud resource for a DataScan job to scan against. The field - could either be: Cloud Storage bucket for DataDiscoveryScan - Format: - //storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID - or BigQuery table of type "TABLE" for - DataProfileScan/DataQualityScan Format: - //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - - This field is a member of `oneof`_ ``source``. - """ - - entity: str = proto.Field( - proto.STRING, - number=100, - oneof='source', - ) - resource: str = proto.Field( - proto.STRING, - number=101, - oneof='source', - ) - - -class ScannedData(proto.Message): - r"""The data scanned during processing (e.g. in incremental - DataScan). - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - incremental_field (google.cloud.dataplex_v1.types.ScannedData.IncrementalField): - The range denoted by values of an incremental - field. - - This field is a member of `oneof`_ ``data_range``. - """ - - class IncrementalField(proto.Message): - r"""A data range denoted by a pair of start/end values of a - field. - - Attributes: - field (str): - Output only. The field that contains values - which monotonically increase over time (e.g. a - timestamp column). - start (str): - Output only. Value that marks the start of - the range. - end (str): - Output only. Value that marks the end of - the range. 
- """ - - field: str = proto.Field( - proto.STRING, - number=1, - ) - start: str = proto.Field( - proto.STRING, - number=2, - ) - end: str = proto.Field( - proto.STRING, - number=3, - ) - - incremental_field: IncrementalField = proto.Field( - proto.MESSAGE, - number=1, - oneof='data_range', - message=IncrementalField, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py deleted file mode 100644 index ad2981fa94d6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/resources.py +++ /dev/null @@ -1,1436 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'State', - 'Lake', - 'AssetStatus', - 'Zone', - 'Action', - 'Asset', - }, -) - - -class State(proto.Enum): - r"""State of a resource. - - Values: - STATE_UNSPECIFIED (0): - State is not specified. - ACTIVE (1): - Resource is active, i.e., ready to use. - CREATING (2): - Resource is under creation. - DELETING (3): - Resource is under deletion. - ACTION_REQUIRED (4): - Resource is active but has unresolved - actions. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - DELETING = 3 - ACTION_REQUIRED = 4 - - -class Lake(proto.Message): - r"""A lake is a centralized repository for managing enterprise - data across the organization distributed across many cloud - projects, and stored in a variety of storage services such as - Google Cloud Storage and BigQuery. The resources attached to a - lake are referred to as managed resources. Data within these - managed resources can be structured or unstructured. A lake - provides data admins with tools to organize, secure and manage - their data at scale, and provides data scientists and data - engineers an integrated experience to easily search, discover, - analyze and transform data and associated metadata. - - Attributes: - name (str): - Output only. The relative resource name of the lake, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the lake. This ID will be different if - the lake is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the lake was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the lake was last - updated. - labels (MutableMapping[str, str]): - Optional. 
User-defined labels for the lake. - description (str): - Optional. Description of the lake. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the lake. - service_account (str): - Output only. Service account associated with - this lake. This service account must be - authorized to access or operate on resources - managed by the lake. - metastore (google.cloud.dataplex_v1.types.Lake.Metastore): - Optional. Settings to manage lake and - Dataproc Metastore service instance association. - asset_status (google.cloud.dataplex_v1.types.AssetStatus): - Output only. Aggregated status of the - underlying assets of the lake. - metastore_status (google.cloud.dataplex_v1.types.Lake.MetastoreStatus): - Output only. Metastore status of the lake. - """ - - class Metastore(proto.Message): - r"""Settings to manage association of Dataproc Metastore with a - lake. - - Attributes: - service (str): - Optional. A relative reference to the Dataproc Metastore - (https://cloud.google.com/dataproc-metastore/docs) service - associated with the lake: - ``projects/{project_id}/locations/{location_id}/services/{service_id}`` - """ - - service: str = proto.Field( - proto.STRING, - number=1, - ) - - class MetastoreStatus(proto.Message): - r"""Status of Lake and Dataproc Metastore service instance - association. - - Attributes: - state (google.cloud.dataplex_v1.types.Lake.MetastoreStatus.State): - Current state of association. - message (str): - Additional information about the current - status. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the metastore status of - the lake. - endpoint (str): - The URI of the endpoint used to access the - Metastore service. - """ - class State(proto.Enum): - r"""Current state of association. - - Values: - STATE_UNSPECIFIED (0): - Unspecified. - NONE (1): - A Metastore service instance is not - associated with the lake. - READY (2): - A Metastore service instance is attached to - the lake. - UPDATING (3): - Attach/detach is in progress. - ERROR (4): - Attach/detach could not be done due to - errors. 
- """ - STATE_UNSPECIFIED = 0 - NONE = 1 - READY = 2 - UPDATING = 3 - ERROR = 4 - - state: 'Lake.MetastoreStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Lake.MetastoreStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - endpoint: str = proto.Field( - proto.STRING, - number=4, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'State' = proto.Field( - proto.ENUM, - number=8, - enum='State', - ) - service_account: str = proto.Field( - proto.STRING, - number=9, - ) - metastore: Metastore = proto.Field( - proto.MESSAGE, - number=102, - message=Metastore, - ) - asset_status: 'AssetStatus' = proto.Field( - proto.MESSAGE, - number=103, - message='AssetStatus', - ) - metastore_status: MetastoreStatus = proto.Field( - proto.MESSAGE, - number=104, - message=MetastoreStatus, - ) - - -class AssetStatus(proto.Message): - r"""Aggregated status of the underlying assets of a lake or zone. - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - active_assets (int): - Number of active assets. - security_policy_applying_assets (int): - Number of assets that are in process of - updating the security policy on attached - resources. - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - active_assets: int = proto.Field( - proto.INT32, - number=2, - ) - security_policy_applying_assets: int = proto.Field( - proto.INT32, - number=3, - ) - - -class Zone(proto.Message): - r"""A zone represents a logical group of related assets within a - lake. A zone can be used to map to organizational structure or - represent stages of data readiness from raw to curated. It - provides managing behavior that is shared or inherited by all - contained assets. - - Attributes: - name (str): - Output only. The relative resource name of the zone, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the zone. This ID will be different if - the zone is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the zone was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the zone was last - updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the zone. - description (str): - Optional. Description of the zone. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the zone. - type_ (google.cloud.dataplex_v1.types.Zone.Type): - Required. Immutable. The type of the zone. - discovery_spec (google.cloud.dataplex_v1.types.Zone.DiscoverySpec): - Optional. 
Specification of the discovery - feature applied to data in this zone. - resource_spec (google.cloud.dataplex_v1.types.Zone.ResourceSpec): - Required. Specification of the resources that - are referenced by the assets within this zone. - asset_status (google.cloud.dataplex_v1.types.AssetStatus): - Output only. Aggregated status of the - underlying assets of the zone. - """ - class Type(proto.Enum): - r"""Type of zone. - - Values: - TYPE_UNSPECIFIED (0): - Zone type not specified. - RAW (1): - A zone that contains data that needs further - processing before it is considered generally - ready for consumption and analytics workloads. - CURATED (2): - A zone that contains data that is considered - to be ready for broader consumption and - analytics workloads. Curated structured data - stored in Cloud Storage must conform to certain - file formats (parquet, avro and orc) and be - organized in a hive-compatible directory layout. - """ - TYPE_UNSPECIFIED = 0 - RAW = 1 - CURATED = 2 - - class ResourceSpec(proto.Message): - r"""Settings for resources attached as assets within a zone. - - Attributes: - location_type (google.cloud.dataplex_v1.types.Zone.ResourceSpec.LocationType): - Required. Immutable. The location type of the - resources that are allowed to be attached to the - assets within this zone. - """ - class LocationType(proto.Enum): - r"""Location type of the resources attached to a zone. - - Values: - LOCATION_TYPE_UNSPECIFIED (0): - Unspecified location type. - SINGLE_REGION (1): - Resources that are associated with a single - region. - MULTI_REGION (2): - Resources that are associated with a - multi-region location. - """ - LOCATION_TYPE_UNSPECIFIED = 0 - SINGLE_REGION = 1 - MULTI_REGION = 2 - - location_type: 'Zone.ResourceSpec.LocationType' = proto.Field( - proto.ENUM, - number=1, - enum='Zone.ResourceSpec.LocationType', - ) - - class DiscoverySpec(proto.Message): - r"""Settings to manage the metadata discovery and publishing in a - zone. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enabled (bool): - Required. Whether discovery is enabled. - include_patterns (MutableSequence[str]): - Optional. The list of patterns to apply for - selecting data to include during discovery if - only a subset of the data should be considered. For - Cloud Storage bucket assets, these are - interpreted as glob patterns used to match - object names. For BigQuery dataset assets, these - are interpreted as patterns to match table - names. - exclude_patterns (MutableSequence[str]): - Optional. The list of patterns to apply for - selecting data to exclude during discovery. For - Cloud Storage bucket assets, these are - interpreted as glob patterns used to match - object names. For BigQuery dataset assets, these - are interpreted as patterns to match table - names. - csv_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.CsvOptions): - Optional. Configuration for CSV data. - json_options (google.cloud.dataplex_v1.types.Zone.DiscoverySpec.JsonOptions): - Optional. Configuration for Json data. - schedule (str): - Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) - for running discovery periodically. Successive discovery - runs must be scheduled at least 60 minutes apart. The - default value is to run discovery every 60 minutes. - - To explicitly set a timezone to the cron tab, apply a prefix - in the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or - "TZ=${IANA_TIME_ZONE}". 
The ${IANA_TIME_ZONE} may only be a - valid string from IANA time zone database. For example, - ``CRON_TZ=America/New_York 1 * * * *``, or - ``TZ=America/New_York 1 * * * *``. - - This field is a member of `oneof`_ ``trigger``. - """ - - class CsvOptions(proto.Message): - r"""Describe CSV and similar semi-structured data formats. - - Attributes: - header_rows (int): - Optional. The number of rows to interpret as - header rows that should be skipped when reading - data rows. - delimiter (str): - Optional. The delimiter being used to - separate values. This defaults to ','. - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - disable_type_inference (bool): - Optional. Whether to disable the inference of - data type for CSV data. If true, all columns - will be registered as strings. - """ - - header_rows: int = proto.Field( - proto.INT32, - number=1, - ) - delimiter: str = proto.Field( - proto.STRING, - number=2, - ) - encoding: str = proto.Field( - proto.STRING, - number=3, - ) - disable_type_inference: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class JsonOptions(proto.Message): - r"""Describe JSON data format. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - disable_type_inference (bool): - Optional. Whether to disable the inference of - data type for Json data. If true, all columns - will be registered as their primitive types - (strings, number or boolean). - """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - disable_type_inference: bool = proto.Field( - proto.BOOL, - number=2, - ) - - enabled: bool = proto.Field( - proto.BOOL, - number=1, - ) - include_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - csv_options: 'Zone.DiscoverySpec.CsvOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='Zone.DiscoverySpec.CsvOptions', - ) - json_options: 'Zone.DiscoverySpec.JsonOptions' = proto.Field( - proto.MESSAGE, - number=5, - message='Zone.DiscoverySpec.JsonOptions', - ) - schedule: str = proto.Field( - proto.STRING, - number=10, - oneof='trigger', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'State' = proto.Field( - proto.ENUM, - number=8, - enum='State', - ) - type_: Type = proto.Field( - proto.ENUM, - number=9, - enum=Type, - ) - discovery_spec: DiscoverySpec = proto.Field( - proto.MESSAGE, - number=103, - message=DiscoverySpec, - ) - resource_spec: ResourceSpec = proto.Field( - proto.MESSAGE, - number=104, - message=ResourceSpec, - ) - asset_status: 'AssetStatus' = proto.Field( - proto.MESSAGE, - number=105, - message='AssetStatus', - ) - - -class Action(proto.Message): - r"""Action represents an issue requiring administrator action for - resolution. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - category (google.cloud.dataplex_v1.types.Action.Category): - The category of issue associated with the - action. - issue (str): - Detailed description of the issue requiring - action. - detect_time (google.protobuf.timestamp_pb2.Timestamp): - The time that the issue was detected. - name (str): - Output only. The relative resource name of the action, of - the form: - ``projects/{project}/locations/{location}/lakes/{lake}/actions/{action}`` - ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/actions/{action}`` - ``projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}/actions/{action}``. - lake (str): - Output only. The relative resource name of the lake, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - zone (str): - Output only. The relative resource name of the zone, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - asset (str): - Output only. The relative resource name of the asset, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - data_locations (MutableSequence[str]): - The list of data locations associated with this action. - Cloud Storage locations are represented as URI paths(E.g. - ``gs://bucket/table1/year=2020/month=Jan/``). BigQuery - locations refer to resource names(E.g. - ``bigquery.googleapis.com/projects/project-id/datasets/dataset-id``). - invalid_data_format (google.cloud.dataplex_v1.types.Action.InvalidDataFormat): - Details for issues related to invalid or - unsupported data formats. - - This field is a member of `oneof`_ ``details``. - incompatible_data_schema (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema): - Details for issues related to incompatible - schemas detected within data. - - This field is a member of `oneof`_ ``details``. - invalid_data_partition (google.cloud.dataplex_v1.types.Action.InvalidDataPartition): - Details for issues related to invalid or - unsupported data partition structure. - - This field is a member of `oneof`_ ``details``. - missing_data (google.cloud.dataplex_v1.types.Action.MissingData): - Details for issues related to absence of data - within managed resources. - - This field is a member of `oneof`_ ``details``. - missing_resource (google.cloud.dataplex_v1.types.Action.MissingResource): - Details for issues related to absence of a - managed resource. - - This field is a member of `oneof`_ ``details``. - unauthorized_resource (google.cloud.dataplex_v1.types.Action.UnauthorizedResource): - Details for issues related to lack of - permissions to access data resources. - - This field is a member of `oneof`_ ``details``. - failed_security_policy_apply (google.cloud.dataplex_v1.types.Action.FailedSecurityPolicyApply): - Details for issues related to applying - security policy. - - This field is a member of `oneof`_ ``details``. - invalid_data_organization (google.cloud.dataplex_v1.types.Action.InvalidDataOrganization): - Details for issues related to invalid data - arrangement. - - This field is a member of `oneof`_ ``details``. - """ - class Category(proto.Enum): - r"""The category of issues. 
- - Values: - CATEGORY_UNSPECIFIED (0): - Unspecified category. - RESOURCE_MANAGEMENT (1): - Resource management related issues. - SECURITY_POLICY (2): - Security policy related issues. - DATA_DISCOVERY (3): - Data and discovery related issues. - """ - CATEGORY_UNSPECIFIED = 0 - RESOURCE_MANAGEMENT = 1 - SECURITY_POLICY = 2 - DATA_DISCOVERY = 3 - - class MissingResource(proto.Message): - r"""Action details for resource references in assets that cannot - be located. - - """ - - class UnauthorizedResource(proto.Message): - r"""Action details for unauthorized resource issues raised to - indicate that the service account associated with the lake - instance is not authorized to access or manage the resource - associated with an asset. - - """ - - class FailedSecurityPolicyApply(proto.Message): - r"""Failed to apply security policy to the managed resource(s) - under a lake, zone or an asset. For a lake or zone resource, one - or more underlying assets has a failure applying security policy - to the associated managed resource. - - Attributes: - asset (str): - Resource name of one of the assets with - failing security policy application. Populated - for a lake or zone resource only. - """ - - asset: str = proto.Field( - proto.STRING, - number=1, - ) - - class InvalidDataFormat(proto.Message): - r"""Action details for invalid or unsupported data files detected - by discovery. - - Attributes: - sampled_data_locations (MutableSequence[str]): - The list of data locations sampled and used - for format/schema inference. - expected_format (str): - The expected data format of the entity. - new_format (str): - The new unexpected data format within the - entity. - """ - - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - expected_format: str = proto.Field( - proto.STRING, - number=2, - ) - new_format: str = proto.Field( - proto.STRING, - number=3, - ) - - class IncompatibleDataSchema(proto.Message): - r"""Action details for incompatible schemas detected by - discovery. - - Attributes: - table (str): - The name of the table containing invalid - data. - existing_schema (str): - The existing and expected schema of the - table. The schema is provided as a JSON - formatted structure listing columns and data - types. - new_schema (str): - The new and incompatible schema within the - table. The schema is provided as a JSON - formatted structured listing columns and data - types. - sampled_data_locations (MutableSequence[str]): - The list of data locations sampled and used - for format/schema inference. - schema_change (google.cloud.dataplex_v1.types.Action.IncompatibleDataSchema.SchemaChange): - Whether the action relates to a schema that - is incompatible or modified. - """ - class SchemaChange(proto.Enum): - r"""Whether the action relates to a schema that is incompatible - or modified. - - Values: - SCHEMA_CHANGE_UNSPECIFIED (0): - Schema change unspecified. - INCOMPATIBLE (1): - Newly discovered schema is incompatible with - existing schema. - MODIFIED (2): - Newly discovered schema has changed from - existing schema for data in a curated zone. 
- """ - SCHEMA_CHANGE_UNSPECIFIED = 0 - INCOMPATIBLE = 1 - MODIFIED = 2 - - table: str = proto.Field( - proto.STRING, - number=1, - ) - existing_schema: str = proto.Field( - proto.STRING, - number=2, - ) - new_schema: str = proto.Field( - proto.STRING, - number=3, - ) - sampled_data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - schema_change: 'Action.IncompatibleDataSchema.SchemaChange' = proto.Field( - proto.ENUM, - number=5, - enum='Action.IncompatibleDataSchema.SchemaChange', - ) - - class InvalidDataPartition(proto.Message): - r"""Action details for invalid or unsupported partitions detected - by discovery. - - Attributes: - expected_structure (google.cloud.dataplex_v1.types.Action.InvalidDataPartition.PartitionStructure): - The issue type of InvalidDataPartition. - """ - class PartitionStructure(proto.Enum): - r"""The expected partition structure. - - Values: - PARTITION_STRUCTURE_UNSPECIFIED (0): - PartitionStructure unspecified. - CONSISTENT_KEYS (1): - Consistent hive-style partition definition - (both raw and curated zone). - HIVE_STYLE_KEYS (2): - Hive style partition definition (curated zone - only). - """ - PARTITION_STRUCTURE_UNSPECIFIED = 0 - CONSISTENT_KEYS = 1 - HIVE_STYLE_KEYS = 2 - - expected_structure: 'Action.InvalidDataPartition.PartitionStructure' = proto.Field( - proto.ENUM, - number=1, - enum='Action.InvalidDataPartition.PartitionStructure', - ) - - class MissingData(proto.Message): - r"""Action details for absence of data detected by discovery. - """ - - class InvalidDataOrganization(proto.Message): - r"""Action details for invalid data arrangement. - """ - - category: Category = proto.Field( - proto.ENUM, - number=1, - enum=Category, - ) - issue: str = proto.Field( - proto.STRING, - number=2, - ) - detect_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - name: str = proto.Field( - proto.STRING, - number=5, - ) - lake: str = proto.Field( - proto.STRING, - number=6, - ) - zone: str = proto.Field( - proto.STRING, - number=7, - ) - asset: str = proto.Field( - proto.STRING, - number=8, - ) - data_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - invalid_data_format: InvalidDataFormat = proto.Field( - proto.MESSAGE, - number=10, - oneof='details', - message=InvalidDataFormat, - ) - incompatible_data_schema: IncompatibleDataSchema = proto.Field( - proto.MESSAGE, - number=11, - oneof='details', - message=IncompatibleDataSchema, - ) - invalid_data_partition: InvalidDataPartition = proto.Field( - proto.MESSAGE, - number=12, - oneof='details', - message=InvalidDataPartition, - ) - missing_data: MissingData = proto.Field( - proto.MESSAGE, - number=13, - oneof='details', - message=MissingData, - ) - missing_resource: MissingResource = proto.Field( - proto.MESSAGE, - number=14, - oneof='details', - message=MissingResource, - ) - unauthorized_resource: UnauthorizedResource = proto.Field( - proto.MESSAGE, - number=15, - oneof='details', - message=UnauthorizedResource, - ) - failed_security_policy_apply: FailedSecurityPolicyApply = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message=FailedSecurityPolicyApply, - ) - invalid_data_organization: InvalidDataOrganization = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message=InvalidDataOrganization, - ) - - -class Asset(proto.Message): - r"""An asset represents a cloud resource that is being managed - within a lake as a member of a zone. 
- - Attributes: - name (str): - Output only. The relative resource name of the asset, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - display_name (str): - Optional. User friendly display name. - uid (str): - Output only. System generated globally unique - ID for the asset. This ID will be different if - the asset is deleted and re-created with the - same name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the asset was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the asset was last - updated. - labels (MutableMapping[str, str]): - Optional. User defined labels for the asset. - description (str): - Optional. Description of the asset. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the asset. - resource_spec (google.cloud.dataplex_v1.types.Asset.ResourceSpec): - Required. Specification of the resource that - is referenced by this asset. - resource_status (google.cloud.dataplex_v1.types.Asset.ResourceStatus): - Output only. Status of the resource - referenced by this asset. - security_status (google.cloud.dataplex_v1.types.Asset.SecurityStatus): - Output only. Status of the security policy - applied to resource referenced by this asset. - discovery_spec (google.cloud.dataplex_v1.types.Asset.DiscoverySpec): - Optional. Specification of the discovery - feature applied to data referenced by this - asset. When this spec is left unset, the asset - will use the spec set on the parent zone. - discovery_status (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus): - Output only. Status of the discovery feature - applied to data referenced by this asset. - """ - - class SecurityStatus(proto.Message): - r"""Security policy status of the asset. Data security policy, - i.e., readers, writers & owners, should be specified in the - lake/zone/asset IAM policy. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.SecurityStatus.State): - The current state of the security policy - applied to the attached resource. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - """ - class State(proto.Enum): - r"""The state of the security policy. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - READY (1): - Security policy has been successfully applied - to the attached resource. - APPLYING (2): - Security policy is in the process of being - applied to the attached resource. - ERROR (3): - Security policy could not be applied to the - attached resource due to errors. - """ - STATE_UNSPECIFIED = 0 - READY = 1 - APPLYING = 2 - ERROR = 3 - - state: 'Asset.SecurityStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.SecurityStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - class DiscoverySpec(proto.Message): - r"""Settings to manage the metadata discovery and publishing for - an asset. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enabled (bool): - Optional. Whether discovery is enabled. - include_patterns (MutableSequence[str]): - Optional. 
The list of patterns to apply for - selecting data to include during discovery if - only a subset of the data should be considered. - For Cloud Storage bucket assets, these are - interpreted as glob patterns used to match - object names. For BigQuery dataset assets, these - are interpreted as patterns to match table - names. - exclude_patterns (MutableSequence[str]): - Optional. The list of patterns to apply for - selecting data to exclude during discovery. For - Cloud Storage bucket assets, these are - interpreted as glob patterns used to match - object names. For BigQuery dataset assets, these - are interpreted as patterns to match table - names. - csv_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.CsvOptions): - Optional. Configuration for CSV data. - json_options (google.cloud.dataplex_v1.types.Asset.DiscoverySpec.JsonOptions): - Optional. Configuration for Json data. - schedule (str): - Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) - for running discovery periodically. Successive discovery - runs must be scheduled at least 60 minutes apart. The - default value is to run discovery every 60 minutes. - - To explicitly set a timezone to the cron tab, apply a prefix - in the cron tab: "CRON_TZ=${IANA_TIME_ZONE}" or - "TZ=${IANA_TIME_ZONE}". The ${IANA_TIME_ZONE} may only be a - valid string from IANA time zone database. For example, - ``CRON_TZ=America/New_York 1 * * * *``, or - ``TZ=America/New_York 1 * * * *``. - - This field is a member of `oneof`_ ``trigger``. - """ - - class CsvOptions(proto.Message): - r"""Describe CSV and similar semi-structured data formats. - - Attributes: - header_rows (int): - Optional. The number of rows to interpret as - header rows that should be skipped when reading - data rows. - delimiter (str): - Optional. The delimiter being used to - separate values. This defaults to ','. - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - disable_type_inference (bool): - Optional. Whether to disable the inference of - data type for CSV data. If true, all columns - will be registered as strings. - """ - - header_rows: int = proto.Field( - proto.INT32, - number=1, - ) - delimiter: str = proto.Field( - proto.STRING, - number=2, - ) - encoding: str = proto.Field( - proto.STRING, - number=3, - ) - disable_type_inference: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class JsonOptions(proto.Message): - r"""Describe JSON data format. - - Attributes: - encoding (str): - Optional. The character encoding of the data. - The default is UTF-8. - disable_type_inference (bool): - Optional. Whether to disable the inference of - data type for Json data. If true, all columns - will be registered as their primitive types - (strings, number or boolean). 
- """ - - encoding: str = proto.Field( - proto.STRING, - number=1, - ) - disable_type_inference: bool = proto.Field( - proto.BOOL, - number=2, - ) - - enabled: bool = proto.Field( - proto.BOOL, - number=1, - ) - include_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - csv_options: 'Asset.DiscoverySpec.CsvOptions' = proto.Field( - proto.MESSAGE, - number=4, - message='Asset.DiscoverySpec.CsvOptions', - ) - json_options: 'Asset.DiscoverySpec.JsonOptions' = proto.Field( - proto.MESSAGE, - number=5, - message='Asset.DiscoverySpec.JsonOptions', - ) - schedule: str = proto.Field( - proto.STRING, - number=10, - oneof='trigger', - ) - - class ResourceSpec(proto.Message): - r"""Identifies the cloud resource that is referenced by this - asset. - - Attributes: - name (str): - Immutable. Relative name of the cloud resource that contains - the data that is being managed within a lake. For example: - ``projects/{project_number}/buckets/{bucket_id}`` - ``projects/{project_number}/datasets/{dataset_id}`` - type_ (google.cloud.dataplex_v1.types.Asset.ResourceSpec.Type): - Required. Immutable. Type of resource. - read_access_mode (google.cloud.dataplex_v1.types.Asset.ResourceSpec.AccessMode): - Optional. Determines how read permissions are - handled for each asset and their associated - tables. Only available to storage buckets - assets. - """ - class Type(proto.Enum): - r"""Type of resource. - - Values: - TYPE_UNSPECIFIED (0): - Type not specified. - STORAGE_BUCKET (1): - Cloud Storage bucket. - BIGQUERY_DATASET (2): - BigQuery dataset. - """ - TYPE_UNSPECIFIED = 0 - STORAGE_BUCKET = 1 - BIGQUERY_DATASET = 2 - - class AccessMode(proto.Enum): - r"""Access Mode determines how data stored within the resource is - read. This is only applicable to storage bucket assets. - - Values: - ACCESS_MODE_UNSPECIFIED (0): - Access mode unspecified. - DIRECT (1): - Default. Data is accessed directly using - storage APIs. - MANAGED (2): - Data is accessed through a managed interface - using BigQuery APIs. - """ - ACCESS_MODE_UNSPECIFIED = 0 - DIRECT = 1 - MANAGED = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'Asset.ResourceSpec.Type' = proto.Field( - proto.ENUM, - number=2, - enum='Asset.ResourceSpec.Type', - ) - read_access_mode: 'Asset.ResourceSpec.AccessMode' = proto.Field( - proto.ENUM, - number=5, - enum='Asset.ResourceSpec.AccessMode', - ) - - class ResourceStatus(proto.Message): - r"""Status of the resource referenced by an asset. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.ResourceStatus.State): - The current state of the managed resource. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - managed_access_identity (str): - Output only. Service account associated with - the BigQuery Connection. - """ - class State(proto.Enum): - r"""The state of a resource. - - Values: - STATE_UNSPECIFIED (0): - State unspecified. - READY (1): - Resource does not have any errors. - ERROR (2): - Resource has errors. 
- """ - STATE_UNSPECIFIED = 0 - READY = 1 - ERROR = 2 - - state: 'Asset.ResourceStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.ResourceStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - managed_access_identity: str = proto.Field( - proto.STRING, - number=4, - ) - - class DiscoveryStatus(proto.Message): - r"""Status of discovery for an asset. - - Attributes: - state (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.State): - The current status of the discovery feature. - message (str): - Additional information about the current - state. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Last update time of the status. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - The start time of the last discovery run. - stats (google.cloud.dataplex_v1.types.Asset.DiscoveryStatus.Stats): - Data Stats of the asset reported by - discovery. - last_run_duration (google.protobuf.duration_pb2.Duration): - The duration of the last discovery run. - """ - class State(proto.Enum): - r"""Current state of discovery. - - Values: - STATE_UNSPECIFIED (0): - State is unspecified. - SCHEDULED (1): - Discovery for the asset is scheduled. - IN_PROGRESS (2): - Discovery for the asset is running. - PAUSED (3): - Discovery for the asset is currently paused - (e.g. due to a lack of available resources). It - will be automatically resumed. - DISABLED (5): - Discovery for the asset is disabled. - """ - STATE_UNSPECIFIED = 0 - SCHEDULED = 1 - IN_PROGRESS = 2 - PAUSED = 3 - DISABLED = 5 - - class Stats(proto.Message): - r"""The aggregated data statistics for the asset reported by - discovery. - - Attributes: - data_items (int): - The count of data items within the referenced - resource. - data_size (int): - The number of stored data bytes within the - referenced resource. - tables (int): - The count of table entities within the - referenced resource. - filesets (int): - The count of fileset entities within the - referenced resource. 
- """ - - data_items: int = proto.Field( - proto.INT64, - number=1, - ) - data_size: int = proto.Field( - proto.INT64, - number=2, - ) - tables: int = proto.Field( - proto.INT64, - number=3, - ) - filesets: int = proto.Field( - proto.INT64, - number=4, - ) - - state: 'Asset.DiscoveryStatus.State' = proto.Field( - proto.ENUM, - number=1, - enum='Asset.DiscoveryStatus.State', - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - last_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - stats: 'Asset.DiscoveryStatus.Stats' = proto.Field( - proto.MESSAGE, - number=6, - message='Asset.DiscoveryStatus.Stats', - ) - last_run_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'State' = proto.Field( - proto.ENUM, - number=8, - enum='State', - ) - resource_spec: ResourceSpec = proto.Field( - proto.MESSAGE, - number=100, - message=ResourceSpec, - ) - resource_status: ResourceStatus = proto.Field( - proto.MESSAGE, - number=101, - message=ResourceStatus, - ) - security_status: SecurityStatus = proto.Field( - proto.MESSAGE, - number=103, - message=SecurityStatus, - ) - discovery_spec: DiscoverySpec = proto.Field( - proto.MESSAGE, - number=106, - message=DiscoverySpec, - ) - discovery_status: DiscoveryStatus = proto.Field( - proto.MESSAGE, - number=107, - message=DiscoveryStatus, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py deleted file mode 100644 index 77accbfca7d7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/security.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'ResourceAccessSpec', - 'DataAccessSpec', - }, -) - - -class ResourceAccessSpec(proto.Message): - r"""ResourceAccessSpec holds the access control configuration to - be enforced on the resources, for example, Cloud Storage bucket, - BigQuery dataset, BigQuery table. - - Attributes: - readers (MutableSequence[str]): - Optional. The format of strings follows the - pattern followed by IAM in the bindings. - user:{email}, serviceAccount:{email} - group:{email}. The set of principals to be - granted reader role on the resource. - writers (MutableSequence[str]): - Optional. The set of principals to be granted - writer role on the resource. - owners (MutableSequence[str]): - Optional. The set of principals to be granted - owner role on the resource. - """ - - readers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - writers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - owners: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DataAccessSpec(proto.Message): - r"""DataAccessSpec holds the access control configuration to be - enforced on data stored within resources (eg: rows, columns in - BigQuery Tables). When associated with data, the data is only - accessible to principals explicitly granted access through the - DataAccessSpec. Principals with access to the containing - resource are not implicitly granted access. - - Attributes: - readers (MutableSequence[str]): - Optional. The format of strings follows the - pattern followed by IAM in the bindings. - user:{email}, serviceAccount:{email} - group:{email}. The set of principals to be - granted reader role on data stored within - resources. - """ - - readers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py deleted file mode 100644 index a790937d97ce..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/service.py +++ /dev/null @@ -1,1395 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
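[Annotation: illustrating the principal-string format ResourceAccessSpec defines above; all emails are hypothetical.]

from google.cloud import dataplex_v1

access = dataplex_v1.ResourceAccessSpec(
    readers=["user:analyst@example.com", "group:data-readers@example.com"],
    writers=["serviceAccount:etl@my-project.iam.gserviceaccount.com"],
    owners=["user:owner@example.com"],
)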
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import tasks as gcd_tasks -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'CreateLakeRequest', - 'UpdateLakeRequest', - 'DeleteLakeRequest', - 'ListLakesRequest', - 'ListLakesResponse', - 'ListLakeActionsRequest', - 'ListActionsResponse', - 'GetLakeRequest', - 'CreateZoneRequest', - 'UpdateZoneRequest', - 'DeleteZoneRequest', - 'ListZonesRequest', - 'ListZonesResponse', - 'ListZoneActionsRequest', - 'GetZoneRequest', - 'CreateAssetRequest', - 'UpdateAssetRequest', - 'DeleteAssetRequest', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListAssetActionsRequest', - 'GetAssetRequest', - 'OperationMetadata', - 'CreateTaskRequest', - 'UpdateTaskRequest', - 'DeleteTaskRequest', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'GetJobRequest', - 'RunTaskRequest', - 'RunTaskResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'CancelJobRequest', - 'CreateEnvironmentRequest', - 'UpdateEnvironmentRequest', - 'DeleteEnvironmentRequest', - 'ListEnvironmentsRequest', - 'ListEnvironmentsResponse', - 'GetEnvironmentRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - }, -) - - -class CreateLakeRequest(proto.Message): - r"""Create lake request. - - Attributes: - parent (str): - Required. The resource name of the lake location, of the - form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a Google Cloud region. - lake_id (str): - Required. Lake identifier. This ID will be used to generate - names such as database and dataset names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the customer project / location. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Lake resource - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - lake_id: str = proto.Field( - proto.STRING, - number=2, - ) - lake: resources.Lake = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Lake, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateLakeRequest(proto.Message): - r"""Update lake request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - lake (google.cloud.dataplex_v1.types.Lake): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - lake: resources.Lake = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Lake, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteLakeRequest(proto.Message): - r"""Delete lake request. - - Attributes: - name (str): - Required. 
The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLakesRequest(proto.Message): - r"""List lakes request. - - Attributes: - parent (str): - Required. The resource name of the lake location, of the - form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a Google Cloud region. - page_size (int): - Optional. Maximum number of Lakes to return. - The service may return fewer than this value. If - unspecified, at most 10 lakes will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListLakes`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListLakes`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListLakesResponse(proto.Message): - r"""List lakes response. - - Attributes: - lakes (MutableSequence[google.cloud.dataplex_v1.types.Lake]): - Lakes under the given parent location. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - lakes: MutableSequence[resources.Lake] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Lake, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListLakeActionsRequest(proto.Message): - r"""List lake actions request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListLakeActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListLakeActions`` must match the call that - provided the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListActionsResponse(proto.Message): - r"""List actions response. - - Attributes: - actions (MutableSequence[google.cloud.dataplex_v1.types.Action]): - Actions under the given parent - lake/zone/asset. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
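[Annotation: a minimal create-and-list sketch for the lake requests above, assuming the generated DataplexServiceClient; project and IDs are hypothetical, and create_lake returns a long-running operation.]

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
parent = "projects/123/locations/us-central1"

operation = client.create_lake(
    parent=parent,
    lake_id="my-lake",  # lowercase letters, numbers, hyphens; 1-63 chars
    lake=dataplex_v1.Lake(display_name="My Lake"),
)
lake = operation.result()  # block until the LRO finishes

request = dataplex_v1.ListLakesRequest(parent=parent, page_size=100)
for lake in client.list_lakes(request=request):  # the pager follows next_page_token
    print(lake.name)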
- """ - - @property - def raw_page(self): - return self - - actions: MutableSequence[resources.Action] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Action, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetLakeRequest(proto.Message): - r"""Get lake request. - - Attributes: - name (str): - Required. The resource name of the lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateZoneRequest(proto.Message): - r"""Create zone request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - zone_id (str): - Required. Zone identifier. This ID will be used to generate - names such as database and dataset names when publishing - metadata to Hive Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique across all lakes from all locations in a - project. - - Must not be one of the reserved IDs (i.e. "default", - "global-temp") - zone (google.cloud.dataplex_v1.types.Zone): - Required. Zone resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - zone_id: str = proto.Field( - proto.STRING, - number=2, - ) - zone: resources.Zone = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Zone, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateZoneRequest(proto.Message): - r"""Update zone request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - zone (google.cloud.dataplex_v1.types.Zone): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - zone: resources.Zone = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Zone, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteZoneRequest(proto.Message): - r"""Delete zone request. - - Attributes: - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListZonesRequest(proto.Message): - r"""List zones request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of zones to return. - The service may return fewer than this value. If - unspecified, at most 10 zones will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListZones`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListZones`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. 
- order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListZonesResponse(proto.Message): - r"""List zones response. - - Attributes: - zones (MutableSequence[google.cloud.dataplex_v1.types.Zone]): - Zones under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - zones: MutableSequence[resources.Zone] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Zone, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListZoneActionsRequest(proto.Message): - r"""List zone actions request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListZoneActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListZoneActions`` must match the call that - provided the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetZoneRequest(proto.Message): - r"""Get zone request. - - Attributes: - name (str): - Required. The resource name of the zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateAssetRequest(proto.Message): - r"""Create asset request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - asset_id (str): - Required. Asset identifier. This ID will be used to generate - names such as table names when publishing metadata to Hive - Metastore and BigQuery. - - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must end with a number or a letter. - - Must be between 1-63 characters. - - Must be unique within the zone. - asset (google.cloud.dataplex_v1.types.Asset): - Required. Asset resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - asset_id: str = proto.Field( - proto.STRING, - number=2, - ) - asset: resources.Asset = proto.Field( - proto.MESSAGE, - number=3, - message=resources.Asset, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateAssetRequest(proto.Message): - r"""Update asset request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. 
- asset (google.cloud.dataplex_v1.types.Asset): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - asset: resources.Asset = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Asset, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteAssetRequest(proto.Message): - r"""Delete asset request. - - Attributes: - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListAssetsRequest(proto.Message): - r"""List assets request. - - Attributes: - parent (str): - Required. The resource name of the parent zone: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. - page_size (int): - Optional. Maximum number of asset to return. - The service may return fewer than this value. If - unspecified, at most 10 assets will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListAssets`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListAssets`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListAssetsResponse(proto.Message): - r"""List assets response. - - Attributes: - assets (MutableSequence[google.cloud.dataplex_v1.types.Asset]): - Asset under the given parent zone. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - assets: MutableSequence[resources.Asset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Asset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListAssetActionsRequest(proto.Message): - r"""List asset actions request. - - Attributes: - parent (str): - Required. The resource name of the parent asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - page_size (int): - Optional. Maximum number of actions to - return. The service may return fewer than this - value. If unspecified, at most 10 actions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListAssetActions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListAssetActions`` must match the call that - provided the page token. 
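[Annotation: tying the asset requests together — a sketch that attaches a Cloud Storage bucket as an asset; all names are hypothetical.]

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
asset = dataplex_v1.Asset(
    resource_spec=dataplex_v1.Asset.ResourceSpec(
        name="projects/123/buckets/my-bucket",
        type_=dataplex_v1.Asset.ResourceSpec.Type.STORAGE_BUCKET,
    ),
)
operation = client.create_asset(
    parent="projects/123/locations/us-central1/lakes/my-lake/zones/my-zone",
    asset_id="my-asset",  # must be unique within the zone
    asset=asset,
)
print(operation.result().name)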
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetAssetRequest(proto.Message): - r"""Get asset request. - - Attributes: - name (str): - Required. The resource name of the asset: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of a long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class CreateTaskRequest(proto.Message): - r"""Create task request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - task_id (str): - Required. Task identifier. - task (google.cloud.dataplex_v1.types.Task): - Required. Task resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - task_id: str = proto.Field( - proto.STRING, - number=2, - ) - task: gcd_tasks.Task = proto.Field( - proto.MESSAGE, - number=3, - message=gcd_tasks.Task, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateTaskRequest(proto.Message): - r"""Update task request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - task (google.cloud.dataplex_v1.types.Task): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. 
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - task: gcd_tasks.Task = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tasks.Task, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteTaskRequest(proto.Message): - r"""Delete task request. - - Attributes: - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTasksRequest(proto.Message): - r"""List tasks request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of tasks to return. - The service may return fewer than this value. If - unspecified, at most 10 tasks will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListZones`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListZones`` - must match the call that provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListTasksResponse(proto.Message): - r"""List tasks response. - - Attributes: - tasks (MutableSequence[google.cloud.dataplex_v1.types.Task]): - Tasks under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable_locations (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - tasks: MutableSequence[gcd_tasks.Task] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcd_tasks.Task, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetTaskRequest(proto.Message): - r"""Get task request. - - Attributes: - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetJobRequest(proto.Message): - r"""Get job request. - - Attributes: - name (str): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RunTaskRequest(proto.Message): - r""" - - Attributes: - name (str): - Required. The resource name of the task: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the task. - If the map is left empty, the task will run with - existing labels from task definition. 
If the map - contains an entry with a new key, the same will - be added to existing set of labels. If the map - contains an entry with an existing label key in - task definition, the task will run with new - label value for that entry. Clearing an existing - label will require label value to be explicitly - set to a hyphen "-". The label value cannot be - empty. - args (MutableMapping[str, str]): - Optional. Execution spec arguments. If the - map is left empty, the task will run with - existing execution spec args from task - definition. If the map contains an entry with a - new key, the same will be added to existing set - of args. If the map contains an entry with an - existing arg key in task definition, the task - will run with new arg value for that entry. - Clearing an existing arg will require arg value - to be explicitly set to a hyphen "-". The arg - value cannot be empty. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - args: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class RunTaskResponse(proto.Message): - r""" - - Attributes: - job (google.cloud.dataplex_v1.types.Job): - Jobs created by RunTask API. - """ - - job: gcd_tasks.Job = proto.Field( - proto.MESSAGE, - number=1, - message=gcd_tasks.Job, - ) - - -class ListJobsRequest(proto.Message): - r"""List jobs request. - - Attributes: - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``. - page_size (int): - Optional. Maximum number of jobs to return. - The service may return fewer than this value. If - unspecified, at most 10 jobs will be returned. - The maximum value is 1000; values above 1000 - will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous ``ListJobs`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to ``ListJobs`` - must match the call that provided the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobsResponse(proto.Message): - r"""List jobs response. - - Attributes: - jobs (MutableSequence[google.cloud.dataplex_v1.types.Job]): - Jobs under a given task. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence[gcd_tasks.Job] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcd_tasks.Job, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelJobRequest(proto.Message): - r"""Cancel task jobs. - - Attributes: - name (str): - Required. The resource name of the job: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}/job/{job_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateEnvironmentRequest(proto.Message): - r"""Create environment request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - environment_id (str): - Required. Environment identifier. 
- - - Must contain only lowercase letters, numbers and hyphens. - - Must start with a letter. - - Must be between 1-63 characters. - - Must end with a number or a letter. - - Must be unique within the lake. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Environment resource. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - environment_id: str = proto.Field( - proto.STRING, - number=2, - ) - environment: analyze.Environment = proto.Field( - proto.MESSAGE, - number=3, - message=analyze.Environment, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateEnvironmentRequest(proto.Message): - r"""Update environment request. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - environment (google.cloud.dataplex_v1.types.Environment): - Required. Update description. Only fields specified in - ``update_mask`` are updated. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - environment: analyze.Environment = proto.Field( - proto.MESSAGE, - number=2, - message=analyze.Environment, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteEnvironmentRequest(proto.Message): - r"""Delete environment request. - - Attributes: - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListEnvironmentsRequest(proto.Message): - r"""List environments request. - - Attributes: - parent (str): - Required. The resource name of the parent lake: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}``. - page_size (int): - Optional. Maximum number of environments to - return. The service may return fewer than this - value. If unspecified, at most 10 environments - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListEnvironments`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListEnvironments`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. - order_by (str): - Optional. Order by fields for the result. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListEnvironmentsResponse(proto.Message): - r"""List environments response. - - Attributes: - environments (MutableSequence[google.cloud.dataplex_v1.types.Environment]): - Environments under the given parent lake. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
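[Annotation: circling back to the RunTaskRequest semantics described above — a sketch of an ad-hoc run with overridden args; names are hypothetical, and positional arguments go under the reserved TASK_ARGS key as a comma-separated string.]

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
response = client.run_task(
    request=dataplex_v1.RunTaskRequest(
        name="projects/123/locations/us-central1/lakes/my-lake/tasks/my-task",
        args={"TASK_ARGS": "--date=2025-01-01,--dry-run"},
        labels={"team": "analytics"},  # merged over the labels on the task definition
    )
)
print(response.job.name)  # the Job created by this run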
- """ - - @property - def raw_page(self): - return self - - environments: MutableSequence[analyze.Environment] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Environment, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetEnvironmentRequest(proto.Message): - r"""Get environment request. - - Attributes: - name (str): - Required. The resource name of the environment: - ``projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSessionsRequest(proto.Message): - r"""List sessions request. - - Attributes: - parent (str): - Required. The resource name of the parent environment: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}``. - page_size (int): - Optional. Maximum number of sessions to - return. The service may return fewer than this - value. If unspecified, at most 10 sessions will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. Page token received from a previous - ``ListSessions`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListSessions`` must match the call that - provided the page token. - filter (str): - Optional. Filter request. The following ``mode`` filter is - supported to return only the sessions belonging to the - requester when the mode is USER and return sessions of all - the users when the mode is ADMIN. When no filter is sent - default to USER mode. NOTE: When the mode is ADMIN, the - requester should have - ``dataplex.environments.listAllSessions`` permission to list - all sessions, in absence of the permission, the request - fails. - - mode = ADMIN \| USER - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSessionsResponse(proto.Message): - r"""List sessions response. - - Attributes: - sessions (MutableSequence[google.cloud.dataplex_v1.types.Session]): - Sessions under a given environment. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - sessions: MutableSequence[analyze.Session] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=analyze.Session, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py b/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py deleted file mode 100644 index 99cff935610f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/tasks.py +++ /dev/null @@ -1,753 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataplex_v1.types import resources -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataplex.v1', - manifest={ - 'Task', - 'Job', - }, -) - - -class Task(proto.Message): - r"""A task represents a user-visible job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The relative resource name of the task, of the - form: - projects/{project_number}/locations/{location_id}/lakes/{lake_id}/ - tasks/{task_id}. - uid (str): - Output only. System generated globally unique - ID for the task. This ID will be different if - the task is deleted and re-created with the same - name. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the task was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the task was last - updated. - description (str): - Optional. Description of the task. - display_name (str): - Optional. User friendly display name. - state (google.cloud.dataplex_v1.types.State): - Output only. Current state of the task. - labels (MutableMapping[str, str]): - Optional. User-defined labels for the task. - trigger_spec (google.cloud.dataplex_v1.types.Task.TriggerSpec): - Required. Spec related to how often and when - a task should be triggered. - execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec): - Required. Spec related to how a task is - executed. - execution_status (google.cloud.dataplex_v1.types.Task.ExecutionStatus): - Output only. Status of the latest task - executions. - spark (google.cloud.dataplex_v1.types.Task.SparkTaskConfig): - Config related to running custom Spark tasks. - - This field is a member of `oneof`_ ``config``. - notebook (google.cloud.dataplex_v1.types.Task.NotebookTaskConfig): - Config related to running scheduled - Notebooks. - - This field is a member of `oneof`_ ``config``. - """ - - class InfrastructureSpec(proto.Message): - r"""Configuration for the underlying infrastructure used to run - workloads. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - batch (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.BatchComputeResources): - Compute resources needed for a Task when - using Dataproc Serverless. - - This field is a member of `oneof`_ ``resources``. - container_image (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.ContainerImageRuntime): - Container Image Runtime Configuration. - - This field is a member of `oneof`_ ``runtime``. 
- vpc_network (google.cloud.dataplex_v1.types.Task.InfrastructureSpec.VpcNetwork): - Vpc network. - - This field is a member of `oneof`_ ``network``. - """ - - class BatchComputeResources(proto.Message): - r"""Batch compute resources associated with the task. - - Attributes: - executors_count (int): - Optional. Total number of job executors. Executor Count - should be between 2 and 100. [Default=2] - max_executors_count (int): - Optional. Max configurable executors. If max_executors_count - > executors_count, then auto-scaling is enabled. Max - Executor Count should be between 2 and 1000. [Default=1000] - """ - - executors_count: int = proto.Field( - proto.INT32, - number=1, - ) - max_executors_count: int = proto.Field( - proto.INT32, - number=2, - ) - - class ContainerImageRuntime(proto.Message): - r"""Container Image Runtime Configuration used with Batch - execution. - - Attributes: - image (str): - Optional. Container image to use. - java_jars (MutableSequence[str]): - Optional. A list of Java JARS to add to the - classpath. Valid input includes Cloud Storage - URIs to Jar binaries. For example, - gs://bucket-name/my/path/to/file.jar - python_packages (MutableSequence[str]): - Optional. A list of python packages to be - installed. Valid formats include Cloud Storage - URI to a PIP installable library. For example, - gs://bucket-name/my/path/to/lib.tar.gz - properties (MutableMapping[str, str]): - Optional. Override to common configuration of open source - components installed on the Dataproc cluster. The properties - to set on daemon config files. Property keys are specified - in ``prefix:property`` format, for example - ``core:hadoop.tmp.dir``. For more information, see `Cluster - properties `__. - """ - - image: str = proto.Field( - proto.STRING, - number=1, - ) - java_jars: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_packages: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - class VpcNetwork(proto.Message): - r"""Cloud VPC Network used to run the infrastructure. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - network (str): - Optional. The Cloud VPC network in which the - job is run. By default, the Cloud VPC network - named Default within the project is used. - - This field is a member of `oneof`_ ``network_name``. - sub_network (str): - Optional. The Cloud VPC sub-network in which - the job is run. - - This field is a member of `oneof`_ ``network_name``. - network_tags (MutableSequence[str]): - Optional. List of network tags to apply to - the job. 
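[Annotation: the infrastructure options above combine like so — a sketch with hypothetical network values; batch, container_image, and vpc_network each sit in their own oneof.]

from google.cloud import dataplex_v1

infra = dataplex_v1.Task.InfrastructureSpec(
    batch=dataplex_v1.Task.InfrastructureSpec.BatchComputeResources(
        executors_count=2,
        max_executors_count=10,  # > executors_count enables auto-scaling
    ),
    vpc_network=dataplex_v1.Task.InfrastructureSpec.VpcNetwork(
        sub_network="my-subnet",      # oneof network_name with `network`
        network_tags=["dataplex"],
    ),
)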
- """ - - network: str = proto.Field( - proto.STRING, - number=1, - oneof='network_name', - ) - sub_network: str = proto.Field( - proto.STRING, - number=2, - oneof='network_name', - ) - network_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - batch: 'Task.InfrastructureSpec.BatchComputeResources' = proto.Field( - proto.MESSAGE, - number=52, - oneof='resources', - message='Task.InfrastructureSpec.BatchComputeResources', - ) - container_image: 'Task.InfrastructureSpec.ContainerImageRuntime' = proto.Field( - proto.MESSAGE, - number=101, - oneof='runtime', - message='Task.InfrastructureSpec.ContainerImageRuntime', - ) - vpc_network: 'Task.InfrastructureSpec.VpcNetwork' = proto.Field( - proto.MESSAGE, - number=150, - oneof='network', - message='Task.InfrastructureSpec.VpcNetwork', - ) - - class TriggerSpec(proto.Message): - r"""Task scheduling and trigger settings. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.dataplex_v1.types.Task.TriggerSpec.Type): - Required. Immutable. Trigger type of the - user-specified Task. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The first run of the task will be after this time. - If not specified, the task will run shortly after being - submitted if ON_DEMAND and based on the schedule if - RECURRING. - disabled (bool): - Optional. Prevent the task from executing. - This does not cancel already running tasks. It - is intended to temporarily disable RECURRING - tasks. - max_retries (int): - Optional. Number of retry attempts before - aborting. Set to zero to never attempt to retry - a failed task. - schedule (str): - Optional. Cron schedule (https://en.wikipedia.org/wiki/Cron) - for running tasks periodically. To explicitly set a timezone - to the cron tab, apply a prefix in the cron tab: - "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The - ${IANA_TIME_ZONE} may only be a valid string from IANA time - zone database. For example, - ``CRON_TZ=America/New_York 1 * * * *``, or - ``TZ=America/New_York 1 * * * *``. This field is required - for RECURRING tasks. - - This field is a member of `oneof`_ ``trigger``. - """ - class Type(proto.Enum): - r"""Determines how often and when the job will run. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified trigger type. - ON_DEMAND (1): - The task runs one-time shortly after Task - Creation. - RECURRING (2): - The task is scheduled to run periodically. - """ - TYPE_UNSPECIFIED = 0 - ON_DEMAND = 1 - RECURRING = 2 - - type_: 'Task.TriggerSpec.Type' = proto.Field( - proto.ENUM, - number=5, - enum='Task.TriggerSpec.Type', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - disabled: bool = proto.Field( - proto.BOOL, - number=4, - ) - max_retries: int = proto.Field( - proto.INT32, - number=7, - ) - schedule: str = proto.Field( - proto.STRING, - number=100, - oneof='trigger', - ) - - class ExecutionSpec(proto.Message): - r"""Execution related settings, like retry and service_account. - - Attributes: - args (MutableMapping[str, str]): - Optional. The arguments to pass to the task. The args can - use placeholders of the format ${placeholder} as part of - key/value string. These will be interpolated before passing - the args to the driver. Currently supported placeholders: - - - ${task_id} - - ${job_time} To pass positional args, set the key as - TASK_ARGS. 
The value should be a comma-separated string of - all the positional arguments. To use a delimiter other - than comma, refer to - https://cloud.google.com/sdk/gcloud/reference/topic/escaping. - In case of other keys being present in the args, then - TASK_ARGS will be passed as the last argument. - service_account (str): - Required. Service account to use to execute a - task. If not provided, the default Compute - service account for the project is used. - project (str): - Optional. The project in which jobs are run. By default, the - project containing the Lake is used. If a project is - provided, the - [ExecutionSpec.service_account][google.cloud.dataplex.v1.Task.ExecutionSpec.service_account] - must belong to this project. - max_job_execution_lifetime (google.protobuf.duration_pb2.Duration): - Optional. The maximum duration after which - the job execution is expired. - kms_key (str): - Optional. The Cloud KMS key to use for encryption, of the - form: - ``projects/{project_number}/locations/{location_id}/keyRings/{key-ring-name}/cryptoKeys/{key-name}``. - """ - - args: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - service_account: str = proto.Field( - proto.STRING, - number=5, - ) - project: str = proto.Field( - proto.STRING, - number=7, - ) - max_job_execution_lifetime: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - kms_key: str = proto.Field( - proto.STRING, - number=9, - ) - - class SparkTaskConfig(proto.Message): - r"""User-specified config for running a Spark task. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - main_jar_file_uri (str): - The Cloud Storage URI of the jar file that contains the main - class. The execution args are passed in as a sequence of - named process arguments (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - main_class (str): - The name of the driver's main class. The jar file that - contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. The execution args are - passed in as a sequence of named process arguments - (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - python_script_file (str): - The Gcloud Storage URI of the main Python file to use as the - driver. Must be a .py file. The execution args are passed in - as a sequence of named process arguments (``--key=value``). - - This field is a member of `oneof`_ ``driver``. - sql_script_file (str): - A reference to a query file. This should be the Cloud - Storage URI of the query file. The execution args are used - to declare a set of script variables (``set key="value";``). - - This field is a member of `oneof`_ ``driver``. - sql_script (str): - The query text. The execution args are used to declare a set - of script variables (``set key="value";``). - - This field is a member of `oneof`_ ``driver``. - file_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of files to be - placed in the working directory of each - executor. - archive_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of archives to - be extracted into the working directory of each - executor. Supported file types: .jar, .tar, - .tar.gz, .tgz, and .zip. 
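[Annotation: putting TriggerSpec, ExecutionSpec, and SparkTaskConfig together — a sketch of a recurring PySpark task; all names, URIs, and the service account are hypothetical.]

from google.cloud import dataplex_v1

task = dataplex_v1.Task(
    trigger_spec=dataplex_v1.Task.TriggerSpec(
        type_=dataplex_v1.Task.TriggerSpec.Type.RECURRING,
        schedule="TZ=America/New_York 0 6 * * *",  # required for RECURRING tasks
        max_retries=3,
    ),
    execution_spec=dataplex_v1.Task.ExecutionSpec(
        service_account="etl@my-project.iam.gserviceaccount.com",
        args={"TASK_ARGS": "--env=prod"},
    ),
    spark=dataplex_v1.Task.SparkTaskConfig(
        python_script_file="gs://my-bucket/jobs/job.py",  # .py driver; member of oneof `driver`
    ),
)

client = dataplex_v1.DataplexServiceClient()
operation = client.create_task(
    parent="projects/123/locations/us-central1/lakes/my-lake",
    task_id="daily-etl",
    task=task,
)
print(operation.result().name)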
- infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): - Optional. Infrastructure specification for - the execution. - """ - - main_jar_file_uri: str = proto.Field( - proto.STRING, - number=100, - oneof='driver', - ) - main_class: str = proto.Field( - proto.STRING, - number=101, - oneof='driver', - ) - python_script_file: str = proto.Field( - proto.STRING, - number=102, - oneof='driver', - ) - sql_script_file: str = proto.Field( - proto.STRING, - number=104, - oneof='driver', - ) - sql_script: str = proto.Field( - proto.STRING, - number=105, - oneof='driver', - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( - proto.MESSAGE, - number=6, - message='Task.InfrastructureSpec', - ) - - class NotebookTaskConfig(proto.Message): - r"""Config for running scheduled notebooks. - - Attributes: - notebook (str): - Required. Path to input notebook. This can be the Cloud - Storage URI of the notebook file or the path to a Notebook - Content. The execution args are accessible as environment - variables (``TASK_key=value``). - infrastructure_spec (google.cloud.dataplex_v1.types.Task.InfrastructureSpec): - Optional. Infrastructure specification for - the execution. - file_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of files to be - placed in the working directory of each - executor. - archive_uris (MutableSequence[str]): - Optional. Cloud Storage URIs of archives to - be extracted into the working directory of each - executor. Supported file types: .jar, .tar, - .tar.gz, .tgz, and .zip. - """ - - notebook: str = proto.Field( - proto.STRING, - number=4, - ) - infrastructure_spec: 'Task.InfrastructureSpec' = proto.Field( - proto.MESSAGE, - number=3, - message='Task.InfrastructureSpec', - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - class ExecutionStatus(proto.Message): - r"""Status of the task execution (e.g. Jobs). - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of the status. - latest_job (google.cloud.dataplex_v1.types.Job): - Output only. 
Latest job execution.
- """
-
- update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=3,
- message=timestamp_pb2.Timestamp,
- )
- latest_job: 'Job' = proto.Field(
- proto.MESSAGE,
- number=9,
- message='Job',
- )
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- uid: str = proto.Field(
- proto.STRING,
- number=2,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=3,
- message=timestamp_pb2.Timestamp,
- )
- update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=4,
- message=timestamp_pb2.Timestamp,
- )
- description: str = proto.Field(
- proto.STRING,
- number=5,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=6,
- )
- state: resources.State = proto.Field(
- proto.ENUM,
- number=7,
- enum=resources.State,
- )
- labels: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=8,
- )
- trigger_spec: TriggerSpec = proto.Field(
- proto.MESSAGE,
- number=100,
- message=TriggerSpec,
- )
- execution_spec: ExecutionSpec = proto.Field(
- proto.MESSAGE,
- number=101,
- message=ExecutionSpec,
- )
- execution_status: ExecutionStatus = proto.Field(
- proto.MESSAGE,
- number=201,
- message=ExecutionStatus,
- )
- spark: SparkTaskConfig = proto.Field(
- proto.MESSAGE,
- number=300,
- oneof='config',
- message=SparkTaskConfig,
- )
- notebook: NotebookTaskConfig = proto.Field(
- proto.MESSAGE,
- number=302,
- oneof='config',
- message=NotebookTaskConfig,
- )
-
-
-class Job(proto.Message):
- r"""A job represents an instance of a task.
-
- Attributes:
- name (str):
- Output only. The relative resource name of the job, of the
- form:
- ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}``.
- uid (str):
- Output only. System generated globally unique
- ID for the job.
- start_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the job was
- started.
- end_time (google.protobuf.timestamp_pb2.Timestamp):
- Output only. The time when the job ended.
- state (google.cloud.dataplex_v1.types.Job.State):
- Output only. Execution state for the job.
- retry_count (int):
- Output only. The number of times the job has
- been retried (excluding the initial attempt).
- service (google.cloud.dataplex_v1.types.Job.Service):
- Output only. The underlying service running a
- job.
- service_job (str):
- Output only. The full resource name for the
- job run under a particular service.
- message (str):
- Output only. Additional information about the
- current state.
- labels (MutableMapping[str, str]):
- Output only. User-defined labels for the
- task.
- trigger (google.cloud.dataplex_v1.types.Job.Trigger):
- Output only. Job execution trigger.
- execution_spec (google.cloud.dataplex_v1.types.Task.ExecutionSpec):
- Output only. Spec related to how a task is
- executed.
- """
- class Service(proto.Enum):
- r"""The underlying service used to run the job.
-
- Values:
- SERVICE_UNSPECIFIED (0):
- Service used to run the job is unspecified.
- DATAPROC (1):
- Dataproc service is used to run this job.
- """
- SERVICE_UNSPECIFIED = 0
- DATAPROC = 1
-
- class State(proto.Enum):
- r"""Execution state of the job.
-
- Values:
- STATE_UNSPECIFIED (0):
- The job state is unknown.
- RUNNING (1):
- The job is running.
- CANCELLING (2):
- The job is cancelling.
- CANCELLED (3):
- The job cancellation was successful.
- SUCCEEDED (4):
- The job completed successfully.
- FAILED (5):
- The job is no longer running due to an error.
- ABORTED (6):
- The job was cancelled outside of Dataplex
- Universal Catalog.
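# [Editor's illustrative sketch, not part of the generated patch: inspecting
# the Job states enumerated above through a task's ExecutionStatus. The task
# resource name is a placeholder.]
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
task = client.get_task(
    name="projects/example/locations/us-central1/lakes/lake/tasks/task"
)
latest = task.execution_status.latest_job
TERMINAL = {
    dataplex_v1.Job.State.SUCCEEDED,
    dataplex_v1.Job.State.FAILED,
    dataplex_v1.Job.State.CANCELLED,
    dataplex_v1.Job.State.ABORTED,
}
print(latest.name, latest.state, "terminal" if latest.state in TERMINAL else "in progress")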
- """ - STATE_UNSPECIFIED = 0 - RUNNING = 1 - CANCELLING = 2 - CANCELLED = 3 - SUCCEEDED = 4 - FAILED = 5 - ABORTED = 6 - - class Trigger(proto.Enum): - r"""Job execution trigger. - - Values: - TRIGGER_UNSPECIFIED (0): - The trigger is unspecified. - TASK_CONFIG (1): - The job was triggered by Dataplex Universal - Catalog based on trigger spec from task - definition. - RUN_REQUEST (2): - The job was triggered by the explicit call of - Task API. - """ - TRIGGER_UNSPECIFIED = 0 - TASK_CONFIG = 1 - RUN_REQUEST = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - retry_count: int = proto.Field( - proto.UINT32, - number=6, - ) - service: Service = proto.Field( - proto.ENUM, - number=7, - enum=Service, - ) - service_job: str = proto.Field( - proto.STRING, - number=8, - ) - message: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - trigger: Trigger = proto.Field( - proto.ENUM, - number=11, - enum=Trigger, - ) - execution_spec: 'Task.ExecutionSpec' = proto.Field( - proto.MESSAGE, - number=100, - message='Task.ExecutionSpec', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini b/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py b/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py deleted file mode 100644 index 321a359c4f6b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/noxfile.py +++ /dev/null @@ -1,591 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-dataplex" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/synthtool/issues/1976): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
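# [Editor's illustrative sketch, not part of the generated patch: the env var
# passed to py.test below selects the protobuf backend for the test run. One
# ad hoc way to verify which backend is active; api_implementation is a
# protobuf-internal module, so treat its layout as an assumption. The variable
# must be set before protobuf is first imported.]
import os

os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"  # or "python", "cpp"

from google.protobuf.internal import api_implementation

print(api_implementation.Type())  # expected to print "upb"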
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. 
- session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
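# [Editor's illustrative note, not part of the generated patch: the entries in
# the list below are PEP 508 direct references of the form
# "name @ git+https://host/org/repo#egg=name&subdirectory=dir", which pip
# resolves by cloning the repo and installing the package found in `dir`.
# A small sketch of how the URL fragment decomposes:]
from urllib.parse import parse_qs, urlparse

ref = (
    "googleapis-common-protos @ git+https://github.com/googleapis/"
    "google-cloud-python#egg=googleapis-common-protos"
    "&subdirectory=packages/googleapis-common-protos"
)
# Split off the "name @ " prefix, then read the #fragment as a query string.
fragment = urlparse(ref.split(" @ ", 1)[1]).fragment
print(parse_qs(fragment))
# -> {'egg': ['googleapis-common-protos'],
#     'subdirectory': ['packages/googleapis-common-protos']}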
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py deleted file mode 100644 index 672f6636c29d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelMetadataJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - await client.cancel_metadata_job(request=request) - - -# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py deleted file mode 100644 index 992adf2c9126..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelMetadataJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_cancel_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CancelMetadataJobRequest( - name="name_value", - ) - - # Make the request - client.cancel_metadata_job(request=request) - - -# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py deleted file mode 100644 index fcdb536becdd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py deleted file mode 100644 index ebd0cd697960..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_aspect_type_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.CreateAspectTypeRequest( - parent="parent_value", - aspect_type_id="aspect_type_id_value", - aspect_type=aspect_type, - ) - - # Make the request - operation = client.create_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py deleted file mode 100644 index 307179eca305..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py deleted file mode 100644 index 10156ffe9405..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py deleted file mode 100644 index e6c5c4b0c6e0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_group_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - operation = client.create_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py deleted file mode 100644 index 9d08ecbcc807..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py deleted file mode 100644 index c8c93d0810d9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py deleted file mode 100644 index 1f07e18303ec..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_type_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEntryTypeRequest( - parent="parent_value", - entry_type_id="entry_type_id_value", - ) - - # Make the request - operation = client.create_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py deleted file mode 100644 index 176dc8f6b21a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMetadataJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "NONE" - metadata_job.import_spec.aspect_sync_mode = "NONE" - metadata_job.type_ = "EXPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py deleted file mode 100644 index fffa3dca81b3..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMetadataJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_metadata_job(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - metadata_job = dataplex_v1.MetadataJob() - metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] - metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "NONE" - metadata_job.import_spec.aspect_sync_mode = "NONE" - metadata_job.type_ = "EXPORT" - - request = dataplex_v1.CreateMetadataJobRequest( - parent="parent_value", - metadata_job=metadata_job, - ) - - # Make the request - operation = client.create_metadata_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py deleted file mode 100644 index 977d70c886fe..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_DeleteAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_aspect_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteAspectTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_aspect_type(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteAspectType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py
deleted file mode 100644
index 7bb600cd72a3..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteAspectType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteAspectType_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_aspect_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteAspectTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_aspect_type(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteAspectType_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py
deleted file mode 100644
index 7f75da02f2a5..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntry
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntry_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_entry():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.delete_entry(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntry_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py
deleted file mode 100644
index 0efb92609a71..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntryGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_entry_group():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_entry_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py
deleted file mode 100644
index cf187e0a1e01..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_group_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntryGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_entry_group():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_entry_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py
deleted file mode 100644
index 827f2dfb7f63..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntry
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntry_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_entry():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.delete_entry(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntry_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py
deleted file mode 100644
index 8a877e0d7180..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntryType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntryType_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_entry_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_entry_type(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntryType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py
deleted file mode 100644
index eaed87a6d471..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_type_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntryType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_DeleteEntryType_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_entry_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntryTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_entry_type(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_DeleteEntryType_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py
deleted file mode 100644
index 3c98341d85fd..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetAspectType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetAspectType_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_aspect_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetAspectTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_aspect_type(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetAspectType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py
deleted file mode 100644
index 238e178abf2e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_aspect_type_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetAspectType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetAspectType_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_aspect_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetAspectTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_aspect_type(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetAspectType_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py
deleted file mode 100644
index ec327f90378e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntry
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntry_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_entry():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_entry(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntry_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py
deleted file mode 100644
index 6c047da8ee7a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntryGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntryGroup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_entry_group():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_entry_group(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntryGroup_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py
deleted file mode 100644
index 6d552d21d845..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_group_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntryGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntryGroup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_entry_group():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_entry_group(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntryGroup_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py
deleted file mode 100644
index bd0bf1f995b0..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntry
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntry_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_entry():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_entry(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntry_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py
deleted file mode 100644
index 4a57ddcf1cf3..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntryType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntryType_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_entry_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_entry_type(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntryType_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py
deleted file mode 100644
index 27060439c400..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_type_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntryType
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetEntryType_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_entry_type():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntryTypeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_entry_type(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetEntryType_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py
deleted file mode 100644
index d2da4af3ae6c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetMetadataJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetMetadataJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_metadata_job():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetMetadataJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_metadata_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetMetadataJob_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py
deleted file mode 100644
index 40fcab33f820..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetMetadataJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_GetMetadataJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_metadata_job():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetMetadataJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_metadata_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CatalogService_GetMetadataJob_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py
deleted file mode 100644
index 8b9ae3b16b14..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAspectTypes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListAspectTypes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_aspect_types():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAspectTypesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_aspect_types(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListAspectTypes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
deleted file mode 100644
index 7f9ca88942bd..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_aspect_types_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAspectTypes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListAspectTypes_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_aspect_types():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAspectTypesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_aspect_types(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListAspectTypes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py
deleted file mode 100644
index 7562629a5995..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEntries
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListEntries_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_entries():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEntriesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_entries(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListEntries_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
deleted file mode 100644
index c7a9e8bceb7c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entries_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEntries
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListEntries_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_entries():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEntriesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_entries(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListEntries_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py
deleted file mode 100644
index e414df47c3a4..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEntryGroups
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListEntryGroups_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_entry_groups():
-    # Create a client
-    client = dataplex_v1.CatalogServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEntryGroupsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_entry_groups(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListEntryGroups_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
deleted file mode 100644
index eb480ab9c3d1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_groups_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEntryGroups
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListEntryGroups_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_entry_groups():
-    # Create a client
-    client = dataplex_v1.CatalogServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEntryGroupsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_entry_groups(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_CatalogService_ListEntryGroups_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py
deleted file mode 100644
index e6bcb19bc98e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEntryTypes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CatalogService_ListEntryTypes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryTypes_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py deleted file mode 100644 index 9cc615ff9dd5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_entry_types_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryTypes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListEntryTypes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entry_types(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntryTypesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_types(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListEntryTypes_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py deleted file mode 100644 index 9a08c5162a08..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMetadataJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py deleted file mode 100644 index 36fd4a81d24c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMetadataJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_metadata_jobs(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListMetadataJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_metadata_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py deleted file mode 100644 index 9aacffaf9a30..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_LookupEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py deleted file mode 100644 index f79f5f04126e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_lookup_entry_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_LookupEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_lookup_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.LookupEntryRequest( - name="name_value", - entry="entry_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py deleted file mode 100644 index ba04f5e078a4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_SearchEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_SearchEntries_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py deleted file mode 100644 index 04b5643e21c6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_search_entries_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_SearchEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_search_entries(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.SearchEntriesRequest( - name="name_value", - query="query_value", - ) - - # Make the request - page_result = client.search_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CatalogService_SearchEntries_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py deleted file mode 100644 index fdaad89c0129..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateAspectType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateAspectType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py deleted file mode 100644 index 79049dd35006..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_aspect_type_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAspectType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateAspectType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_aspect_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - aspect_type = dataplex_v1.AspectType() - aspect_type.metadata_template.name = "name_value" - aspect_type.metadata_template.type_ = "type__value" - - request = dataplex_v1.UpdateAspectTypeRequest( - aspect_type=aspect_type, - ) - - # Make the request - operation = client.update_aspect_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateAspectType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py deleted file mode 100644 index b1acbea5b5db..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py deleted file mode 100644 index c084f241abb9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py deleted file mode 100644 index 82aa81ae58fd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_group_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry_group(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryGroupRequest( - ) - - # Make the request - operation = client.update_entry_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py deleted file mode 100644 index 4d38a5ac7833..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - entry = dataplex_v1.Entry() - entry.entry_type = "entry_type_value" - - request = dataplex_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py deleted file mode 100644 index f19a55e6b0e6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryType_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryType_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py deleted file mode 100644 index 47bd13e333ae..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_update_entry_type_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryType -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CatalogService_UpdateEntryType_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entry_type(): - # Create a client - client = dataplex_v1.CatalogServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.UpdateEntryTypeRequest( - ) - - # Make the request - operation = client.update_entry_type(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CatalogService_UpdateEntryType_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py deleted file mode 100644 index 4cc2c823341a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_CreateEncryptionConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEncryptionConfigRequest( - parent="parent_value", - encryption_config_id="encryption_config_id_value", - ) - - # Make the request - operation = client.create_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_CreateEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py deleted file mode 100644 index c170b0a8d37c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_create_encryption_config_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateEncryptionConfigRequest( - parent="parent_value", - encryption_config_id="encryption_config_id_value", - ) - - # Make the request - operation = client.create_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py deleted file mode 100644 index bc483ea781a9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py deleted file mode 100644 index b79392c229ea..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_encryption_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py deleted file mode 100644 index 6c00b9ee027e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_GetEncryptionConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_encryption_config(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_GetEncryptionConfig_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py deleted file mode 100644 index 837fdfb77220..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_get_encryption_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_GetEncryptionConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_encryption_config(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEncryptionConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_encryption_config(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_CmekService_GetEncryptionConfig_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py deleted file mode 100644 index 0af63d57396e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEncryptionConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_ListEncryptionConfigs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_encryption_configs(): - # Create a client - client = dataplex_v1.CmekServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEncryptionConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_encryption_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_CmekService_ListEncryptionConfigs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py deleted file mode 100644 index 6721d3906f0c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEncryptionConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_encryption_configs(): - # Create a client - client = dataplex_v1.CmekServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEncryptionConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_encryption_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py deleted file mode 100644 index bcbaaaa68e3d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEncryptionConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_encryption_config():
-    # Create a client
-    client = dataplex_v1.CmekServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateEncryptionConfigRequest(
-    )
-
-    # Make the request
-    operation = client.update_encryption_config(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py
deleted file mode 100644
index e35470d3489c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_cmek_service_update_encryption_config_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateEncryptionConfig
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_encryption_config():
-    # Create a client
-    client = dataplex_v1.CmekServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateEncryptionConfigRequest(
-    )
-
-    # Make the request
-    operation = client.update_encryption_config(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py
deleted file mode 100644
index ee8bca70432f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_CreateContent_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    content = dataplex_v1.Content()
-    content.data_text = "data_text_value"
-    content.sql_script.engine = "SPARK"
-    content.path = "path_value"
-
-    request = dataplex_v1.CreateContentRequest(
-        parent="parent_value",
-        content=content,
-    )
-
-    # Make the request
-    response = await client.create_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_CreateContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py
deleted file mode 100644
index 51ebad7acbc6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_create_content_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_CreateContent_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    content = dataplex_v1.Content()
-    content.data_text = "data_text_value"
-    content.sql_script.engine = "SPARK"
-    content.path = "path_value"
-
-    request = dataplex_v1.CreateContentRequest(
-        parent="parent_value",
-        content=content,
-    )
-
-    # Make the request
-    response = client.create_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_CreateContent_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py
deleted file mode 100644
index cb2b560d1ddf..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_DeleteContent_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteContentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_content(request=request)
-
-
-# [END dataplex_v1_generated_ContentService_DeleteContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py
deleted file mode 100644
index 00bc415820e8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_delete_content_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_DeleteContent_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteContentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_content(request=request)
-
-
-# [END dataplex_v1_generated_ContentService_DeleteContent_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py
deleted file mode 100644
index 8f536a550208..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_GetContent_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetContentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_GetContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py
deleted file mode 100644
index 99d63ea78c5a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_content_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_GetContent_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetContentRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_GetContent_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py
deleted file mode 100644
index b2684491d612..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_GetIamPolicy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-async def sample_get_iam_policy():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.GetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = await client.get_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_GetIamPolicy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py
deleted file mode 100644
index e1c27f30c89c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_get_iam_policy_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_GetIamPolicy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-def sample_get_iam_policy():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.GetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = client.get_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_GetIamPolicy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py
deleted file mode 100644
index fee9a49cd105..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_ListContent_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListContentRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_content(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_ContentService_ListContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
deleted file mode 100644
index 45cfe1764d91..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_list_content_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_ListContent_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListContentRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_content(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_ContentService_ListContent_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py
deleted file mode 100644
index 23071cd66da7..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_SetIamPolicy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-async def sample_set_iam_policy():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.SetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = await client.set_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_SetIamPolicy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py
deleted file mode 100644
index 84a85d8c891e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_set_iam_policy_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_SetIamPolicy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-def sample_set_iam_policy():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.SetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = client.set_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_SetIamPolicy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py
deleted file mode 100644
index b5b6956a100c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for TestIamPermissions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_TestIamPermissions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-async def sample_test_iam_permissions():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.TestIamPermissionsRequest(
-        resource="resource_value",
-        permissions=['permissions_value1', 'permissions_value2'],
-    )
-
-    # Make the request
-    response = await client.test_iam_permissions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_TestIamPermissions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py
deleted file mode 100644
index 88e6c0a30818..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_test_iam_permissions_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for TestIamPermissions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_TestIamPermissions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-def sample_test_iam_permissions():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.TestIamPermissionsRequest(
-        resource="resource_value",
-        permissions=['permissions_value1', 'permissions_value2'],
-    )
-
-    # Make the request
-    response = client.test_iam_permissions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_TestIamPermissions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py
deleted file mode 100644
index 294c1dc90689..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_UpdateContent_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceAsyncClient()
-
-    # Initialize request argument(s)
-    content = dataplex_v1.Content()
-    content.data_text = "data_text_value"
-    content.sql_script.engine = "SPARK"
-    content.path = "path_value"
-
-    request = dataplex_v1.UpdateContentRequest(
-        content=content,
-    )
-
-    # Make the request
-    response = await client.update_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_UpdateContent_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py
deleted file mode 100644
index 3bd002a75b1c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_content_service_update_content_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateContent
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_ContentService_UpdateContent_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_content():
-    # Create a client
-    client = dataplex_v1.ContentServiceClient()
-
-    # Initialize request argument(s)
-    content = dataplex_v1.Content()
-    content.data_text = "data_text_value"
-    content.sql_script.engine = "SPARK"
-    content.path = "path_value"
-
-    request = dataplex_v1.UpdateContentRequest(
-        content=content,
-    )
-
-    # Make the request
-    response = client.update_content(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_ContentService_UpdateContent_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py
deleted file mode 100644
index 6d7e850f7d00..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDataScan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_CreateDataScan_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_data_scan():
-    # Create a client
-    client = dataplex_v1.DataScanServiceAsyncClient()
-
-    # Initialize request argument(s)
-    data_scan = dataplex_v1.DataScan()
-    data_scan.data_quality_spec.rules.dimension = "dimension_value"
-    data_scan.data.entity = "entity_value"
-
-    request = dataplex_v1.CreateDataScanRequest(
-        parent="parent_value",
-        data_scan=data_scan,
-        data_scan_id="data_scan_id_value",
-    )
-
-    # Make the request
-    operation = client.create_data_scan(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_CreateDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
deleted file mode 100644
index 8991dae05723..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_create_data_scan_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDataScan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_CreateDataScan_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_data_scan():
-    # Create a client
-    client = dataplex_v1.DataScanServiceClient()
-
-    # Initialize request argument(s)
-    data_scan = dataplex_v1.DataScan()
-    data_scan.data_quality_spec.rules.dimension = "dimension_value"
-    data_scan.data.entity = "entity_value"
-
-    request = dataplex_v1.CreateDataScanRequest(
-        parent="parent_value",
-        data_scan=data_scan,
-        data_scan_id="data_scan_id_value",
-    )
-
-    # Make the request
-    operation = client.create_data_scan(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_CreateDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
deleted file mode 100644
index 34ba6263ff4b..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteDataScan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_DeleteDataScan_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_data_scan():
-    # Create a client
-    client = dataplex_v1.DataScanServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteDataScanRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_data_scan(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_DeleteDataScan_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
deleted file mode 100644
index da530afdeae4..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteDataScan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_DeleteDataScan_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_data_scan():
-    # Create a client
-    client = dataplex_v1.DataScanServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteDataScanRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_data_scan(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_DeleteDataScan_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py
deleted file mode 100644
index 12a8addea03a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GenerateDataQualityRules
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_generate_data_quality_rules():
-    # Create a client
-    client = dataplex_v1.DataScanServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GenerateDataQualityRulesRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.generate_data_quality_rules(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py
deleted file mode 100644
index 6baafa1eaa02..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GenerateDataQualityRules
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_generate_data_quality_rules():
-    # Create a client
-    client = dataplex_v1.DataScanServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GenerateDataQualityRulesRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.generate_data_quality_rules(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py
deleted file mode 100644
index b5c832bc848a..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataScan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataScanService_GetDataScan_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py deleted file mode 100644 index 80598e607389..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScanJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScanJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_scan_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScanJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py deleted file mode 100644 index ddac11dcc9f5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScanJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScanJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_data_scan_job(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScanJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py deleted file mode 100644 index be7a3b9be4c2..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_get_data_scan_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_GetDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_GetDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py deleted file mode 100644 index c61c2370d8d4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScanJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
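The Get* samples pass the literal "name_value"; in practice the name must be a full resource path. A minimal sketch using the client's generated path helper, assuming the standard projects/{project}/locations/{location}/dataScans/{dataScan} pattern (the project, location, and scan IDs below are made up):

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()

# Builds "projects/my-project/locations/us-central1/dataScans/my-scan".
name = client.data_scan_path("my-project", "us-central1", "my-scan")

response = client.get_data_scan(request=dataplex_v1.GetDataScanRequest(name=name))
print(response)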
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_data_scan_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py deleted file mode 100644 index f2a0b4655a4f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScanJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_data_scan_jobs(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScanJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scan_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScanJobs_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py deleted file mode 100644 index 48c610c9b06c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScans -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScans_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
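The list samples iterate result by result; the pager they return also exposes page-level iteration, and page_size on the request caps how many results each underlying RPC returns. A short sketch (the page size is illustrative):

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
request = dataplex_v1.ListDataScanJobsRequest(
    parent="parent_value",
    page_size=50,  # illustrative cap per RPC
)

page_result = client.list_data_scan_jobs(request=request)
for page in page_result.pages:  # one ListDataScanJobsResponse per RPC
    for job in page.data_scan_jobs:
        print(job.name)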
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_data_scans(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScans_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py deleted file mode 100644 index 92571d491f45..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_list_data_scans_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataScans -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_ListDataScans_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_data_scans(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListDataScansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_scans(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_DataScanService_ListDataScans_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py deleted file mode 100644 index 2fa52865fd65..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_RunDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = await client.run_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_RunDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py deleted file mode 100644 index 33d96e7d6238..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_run_data_scan_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_RunDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_run_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.RunDataScanRequest( - name="name_value", - ) - - # Make the request - response = client.run_data_scan(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_RunDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py deleted file mode 100644 index ffcae850e8d9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_UpdateDataScan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
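RunDataScan returns immediately with the job it started, so the response's job field can be inspected right away or polled later with GetDataScanJob. A short sketch (the resource name is a placeholder):

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
response = client.run_data_scan(
    request=dataplex_v1.RunDataScanRequest(name="name_value")
)

# The response carries the newly created job, including its server-assigned name.
job = response.job
print(job.name, job.state)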
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceAsyncClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")] - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = await client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_UpdateDataScan_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py deleted file mode 100644 index c93c7c383986..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_scan_service_update_data_scan_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataScan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataScanService_UpdateDataScan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
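The long-running-operation samples differ between the two clients: the sync Operation.result() blocks until completion, while on the async client both the RPC call and AsyncOperation.result() are coroutines and each needs its own await, as in the corrected snippet above. A self-contained sketch of the async idiom:

from google.cloud import dataplex_v1


async def update_scan_and_wait(request: dataplex_v1.UpdateDataScanRequest):
    client = dataplex_v1.DataScanServiceAsyncClient()

    # The RPC itself is a coroutine...
    operation = await client.update_data_scan(request=request)

    # ...and so is AsyncOperation.result(), so it must be awaited too.
    return await operation.result()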
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_data_scan(): - # Create a client - client = dataplex_v1.DataScanServiceClient() - - # Initialize request argument(s) - data_scan = dataplex_v1.DataScan() - data_scan.data_quality_spec.rules = [dataplex_v1.DataQualityRule(dimension="dimension_value")] - data_scan.data.entity = "entity_value" - - request = dataplex_v1.UpdateDataScanRequest( - data_scan=data_scan, - ) - - # Make the request - operation = client.update_data_scan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataScanService_UpdateDataScan_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py deleted file mode 100644 index e2b2bf71b387..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
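UpdateDataScanRequest also accepts an update_mask; restricting it to the fields actually being changed avoids overwriting the rest of the DataScan. A sketch updating only the description (the chosen field and values are illustrative):

from google.protobuf import field_mask_pb2

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()

data_scan = dataplex_v1.DataScan()
data_scan.name = "name_value"
data_scan.description = "nightly data quality checks"

request = dataplex_v1.UpdateDataScanRequest(
    data_scan=data_scan,
    # Only the listed paths are written; other fields stay untouched.
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)

operation = client.update_data_scan(request=request)
print(operation.result())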
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = await client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py deleted file mode 100644 index beeb5680d9e5..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = await client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py deleted file mode 100644 index a3e3aa93857e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - data_attribute_binding = dataplex_v1.DataAttributeBinding() - data_attribute_binding.resource = "resource_value" - - request = dataplex_v1.CreateDataAttributeBindingRequest( - parent="parent_value", - data_attribute_binding_id="data_attribute_binding_id_value", - data_attribute_binding=data_attribute_binding, - ) - - # Make the request - operation = client.create_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py deleted file mode 100644 index d6152220a602..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataAttributeRequest( - parent="parent_value", - data_attribute_id="data_attribute_id_value", - ) - - # Make the request - operation = client.create_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py deleted file mode 100644 index 99ce40ce34a9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = await client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py deleted file mode 100644 index 772a778084ad..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateDataTaxonomyRequest( - parent="parent_value", - data_taxonomy_id="data_taxonomy_id_value", - ) - - # Make the request - operation = client.create_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py deleted file mode 100644 index 0e64772d9c22..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = await client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py deleted file mode 100644 index c477330499b8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = await client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py deleted file mode 100644 index e364102af74f..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttributeBinding -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_attribute_binding(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeBindingRequest( - name="name_value", - etag="etag_value", - ) - - # Make the request - operation = client.delete_data_attribute_binding(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py deleted file mode 100644 index 0c151d0cc561..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
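The DeleteDataAttributeBinding samples pass a literal etag; in practice the etag comes from a prior read of the binding, the usual optimistic-concurrency pattern. A sketch under that assumption (the resource name is a placeholder):

from google.cloud import dataplex_v1

client = dataplex_v1.DataTaxonomyServiceClient()

# Read the binding first to obtain its current etag.
binding = client.get_data_attribute_binding(
    request=dataplex_v1.GetDataAttributeBindingRequest(name="name_value")
)

# Echo the etag back so the delete fails if the binding changed meanwhile.
operation = client.delete_data_attribute_binding(
    request=dataplex_v1.DeleteDataAttributeBindingRequest(
        name=binding.name,
        etag=binding.etag,
    )
)

print("Waiting for operation to complete...")
operation.result()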
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_data_attribute(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataAttributeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_data_attribute(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py deleted file mode 100644 index b436ccabc9fd..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_data_taxonomy(): - # Create a client - client = dataplex_v1.DataTaxonomyServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteDataTaxonomyRequest( - name="name_value", - ) - - # Make the request - operation = await client.delete_data_taxonomy(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py deleted file mode 100644 index bb970218bb82..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteDataTaxonomyRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_data_taxonomy(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py
deleted file mode 100644
index 1ef7d2f933b4..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_data_attribute(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py
deleted file mode 100644
index 3f4a9258be18..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeBindingRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_data_attribute_binding(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py
deleted file mode 100644
index 1726b15817dc..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeBindingRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_attribute_binding(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
deleted file mode 100644
index c772287eecea..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataAttributeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_attribute(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
deleted file mode 100644
index 7001118c3814..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataTaxonomyRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_data_taxonomy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
deleted file mode 100644
index 9171ee6b5c2c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetDataTaxonomyRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_taxonomy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
deleted file mode 100644
index ddc48325c378..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributeBindings
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_attribute_bindings():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributeBindingsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attribute_bindings(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
deleted file mode 100644
index a37350712c72..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributeBindings
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_attribute_bindings():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributeBindingsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attribute_bindings(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
deleted file mode 100644
index 884af22fadca..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_attributes():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attributes(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
deleted file mode 100644
index ad286e0bccc0..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataAttributes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_attributes():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataAttributesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_attributes(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
deleted file mode 100644
index a8e44196d0fe..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataTaxonomies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_data_taxonomies():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataTaxonomiesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_taxonomies(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
deleted file mode 100644
index 3199469fe760..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataTaxonomies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_data_taxonomies():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListDataTaxonomiesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_data_taxonomies(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
deleted file mode 100644
index 5538cdb101c5..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataAttributeRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_attribute(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
deleted file mode 100644
index 003d919da568..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    data_attribute_binding = dataplex_v1.DataAttributeBinding()
-    data_attribute_binding.resource = "resource_value"
-
-    request = dataplex_v1.UpdateDataAttributeBindingRequest(
-        data_attribute_binding=data_attribute_binding,
-    )
-
-    # Make the request
-    operation = client.update_data_attribute_binding(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
deleted file mode 100644
index bcde182293ec..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttributeBinding
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_attribute_binding():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    data_attribute_binding = dataplex_v1.DataAttributeBinding()
-    data_attribute_binding.resource = "resource_value"
-
-    request = dataplex_v1.UpdateDataAttributeBindingRequest(
-        data_attribute_binding=data_attribute_binding,
-    )
-
-    # Make the request
-    operation = client.update_data_attribute_binding(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
deleted file mode 100644
index 72938b7a0084..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataAttribute
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_attribute():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataAttributeRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_attribute(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
deleted file mode 100644
index d27e74f03f83..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataTaxonomyRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_taxonomy(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
deleted file mode 100644
index e2165abe9e32..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateDataTaxonomy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_data_taxonomy():
-    # Create a client
-    client = dataplex_v1.DataTaxonomyServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateDataTaxonomyRequest(
-    )
-
-    # Make the request
-    operation = client.update_data_taxonomy(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
deleted file mode 100644
index e0092b6956d1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CancelJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_cancel_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.CancelJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.cancel_job(request=request)
-
-
-# [END dataplex_v1_generated_DataplexService_CancelJob_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
deleted file mode 100644
index 402f8a637848..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_cancel_job_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CancelJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_cancel_job():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.CancelJobRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.cancel_job(request=request)
-
-
-# [END dataplex_v1_generated_DataplexService_CancelJob_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
deleted file mode 100644
index 009395cfc039..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_async.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CreateAsset_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    asset = dataplex_v1.Asset()
-    asset.resource_spec.type_ = "BIGQUERY_DATASET"
-
-    request = dataplex_v1.CreateAssetRequest(
-        parent="parent_value",
-        asset_id="asset_id_value",
-        asset=asset,
-    )
-
-    # Make the request
-    operation = client.create_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_CreateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
deleted file mode 100644
index 4c973edec59f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_asset_sync.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CreateAsset_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    asset = dataplex_v1.Asset()
-    asset.resource_spec.type_ = "BIGQUERY_DATASET"
-
-    request = dataplex_v1.CreateAssetRequest(
-        parent="parent_value",
-        asset_id="asset_id_value",
-        asset=asset,
-    )
-
-    # Make the request
-    operation = client.create_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_CreateAsset_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py
deleted file mode 100644
index c6179d548ca0..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_async.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_CreateEnvironment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py deleted file mode 100644 index 6e83bdaa4850..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_environment_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateEnvironment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - environment = dataplex_v1.Environment() - environment.infrastructure_spec.os_image.image_version = "image_version_value" - - request = dataplex_v1.CreateEnvironmentRequest( - parent="parent_value", - environment_id="environment_id_value", - environment=environment, - ) - - # Make the request - operation = client.create_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py deleted file mode 100644 index 9cfc496cdf81..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateLake_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py deleted file mode 100644 index 424884d08a39..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_lake_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateLake_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.CreateLakeRequest( - parent="parent_value", - lake_id="lake_id_value", - ) - - # Make the request - operation = client.create_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py deleted file mode 100644 index 570446c72951..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py deleted file mode 100644 index 059d6e9caf59..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_task_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - task = dataplex_v1.Task() - task.spark.main_jar_file_uri = "main_jar_file_uri_value" - task.trigger_spec.schedule = "schedule_value" - task.trigger_spec.type_ = "RECURRING" - task.execution_spec.service_account = "service_account_value" - - request = dataplex_v1.CreateTaskRequest( - parent="parent_value", - task_id="task_id_value", - task=task, - ) - - # Make the request - operation = client.create_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py deleted file mode 100644 index 6019acc85718..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateZone_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_create_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", - zone=zone, - ) - - # Make the request - operation = client.create_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py deleted file mode 100644 index 6a4caa9ec79d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_create_zone_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_CreateZone_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_create_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - zone = dataplex_v1.Zone() - zone.type_ = "CURATED" - zone.resource_spec.location_type = "MULTI_REGION" - - request = dataplex_v1.CreateZoneRequest( - parent="parent_value", - zone_id="zone_id_value", - zone=zone, - ) - - # Make the request - operation = client.create_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_CreateZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py deleted file mode 100644 index cb38f16150f4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteAsset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py deleted file mode 100644 index df1f8c9727f0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_asset_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteAsset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteAssetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_asset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py deleted file mode 100644 index e4ddd35aff61..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py deleted file mode 100644 index bef7a8b3bbc9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_environment_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteEnvironment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteEnvironmentRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_environment(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py deleted file mode 100644 index b471608aca67..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteLake_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteLake_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py deleted file mode 100644 index e0712b0d17b8..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_lake_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteLake -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteLake_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_lake(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteLakeRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_lake(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteLake_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py deleted file mode 100644 index 770fcc16f1f4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_task(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteTaskRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_task(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteTask_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py deleted file mode 100644 index f467f23478b7..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_task(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteTaskRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_task(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteTask_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py deleted file mode 100644 index c312b04269ba..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteZone_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteZone_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py deleted file mode 100644 index c13e5cd84601..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_zone_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteZone -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_DeleteZone_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_delete_zone(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.DeleteZoneRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_zone(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_DeleteZone_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py deleted file mode 100644 index 93fe6155f85b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetAsset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_asset(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetAsset_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py deleted file mode 100644 index 92f815fcbc39..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_asset_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAsset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetAsset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_asset(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetAssetRequest( - name="name_value", - ) - - # Make the request - response = client.get_asset(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetAsset_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py deleted file mode 100644 index c54409b11c2c..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetEnvironment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_environment(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetEnvironment_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py deleted file mode 100644 index 94f419065b91..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_environment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEnvironment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetEnvironment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_environment(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetEnvironmentRequest( - name="name_value", - ) - - # Make the request - response = client.get_environment(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetEnvironment_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py deleted file mode 100644 index cd1d8e123e95..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetJob_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py deleted file mode 100644 index 8b332b6b6de9..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_DataplexService_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_job(): - # Create a client - client = dataplex_v1.DataplexServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_DataplexService_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py deleted file mode 100644 index b42d72569d47..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetLake_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_lake(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py
deleted file mode 100644
index 610dd6b9dabc..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_lake_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetLake_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetLakeRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_lake(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetLake_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py
deleted file mode 100644
index e032ebe67bc5..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py
deleted file mode 100644
index 147f2f6893d0..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_task_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py
deleted file mode 100644
index 99dcf4f47a02..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetZone_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_zone(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py
deleted file mode 100644
index 9f91127efc5f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_get_zone_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_GetZone_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetZoneRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_zone(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_GetZone_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py
deleted file mode 100644
index 8845154ecd43..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssetActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssetActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_asset_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_asset_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssetActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
deleted file mode 100644
index 417850a8ef86..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssetActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssetActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_asset_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_asset_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssetActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py
deleted file mode 100644
index 78a17cdc6cd6..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssets_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_assets():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_assets(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssets_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
deleted file mode 100644
index b12afd0684d9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_assets_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_assets():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_assets(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListAssets_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py
deleted file mode 100644
index a816672d20ba..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEnvironments
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListEnvironments_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_environments():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEnvironmentsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_environments(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListEnvironments_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
deleted file mode 100644
index a87c26037b1f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_environments_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListEnvironments
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListEnvironments_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_environments():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListEnvironmentsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_environments(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListEnvironments_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py
deleted file mode 100644
index 519c8b556f56..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListJobs_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_jobs():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListJobsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_jobs(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListJobs_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
deleted file mode 100644
index 231b588f3d08..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_jobs_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListJobs_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_jobs():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListJobsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_jobs(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListJobs_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py
deleted file mode 100644
index b3b3b90becee..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakeActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakeActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_lake_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakeActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lake_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakeActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
deleted file mode 100644
index 4eb7cfeacfb1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakeActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakeActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_lake_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakeActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lake_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakeActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py
deleted file mode 100644
index 1821ff51eb42..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakes_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_lakes():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lakes(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakes_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
deleted file mode 100644
index 30bd707e596c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_lakes_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListLakes
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListLakes_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_lakes():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListLakesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_lakes(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListLakes_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py
deleted file mode 100644
index 87fde35485ec..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListSessions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_sessions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListSessionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListSessions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
deleted file mode 100644
index 5e2349662e53..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_sessions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListSessions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_sessions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListSessionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListSessions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py
deleted file mode 100644
index ee1bb7a358f2..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListTasks
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListTasks_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_tasks():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListTasksRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_tasks(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListTasks_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
deleted file mode 100644
index 8d6ea9e99d91..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_tasks_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListTasks
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListTasks_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_tasks():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListTasksRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_tasks(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListTasks_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py
deleted file mode 100644
index 11d1477126da..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZoneActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZoneActions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_zone_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZoneActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zone_actions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZoneActions_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
deleted file mode 100644
index f254856503de..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZoneActions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZoneActions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_zone_actions():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZoneActionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zone_actions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZoneActions_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py
deleted file mode 100644
index 8a3a26b0f9c8..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZones
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZones_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_list_zones():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZonesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zones(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZones_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
deleted file mode 100644
index 314a861f927e..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zones_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListZones
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_ListZones_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_list_zones():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.ListZonesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_zones(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataplex_v1_generated_DataplexService_ListZones_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py
deleted file mode 100644
index ea91643d7ee1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for RunTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_RunTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_run_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.RunTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.run_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_RunTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py
deleted file mode 100644
index cca14742d157..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_run_task_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for RunTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_RunTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_run_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.RunTaskRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.run_task(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_RunTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py
deleted file mode 100644
index 1825dd3af6ae..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateAsset_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    asset = dataplex_v1.Asset()
-    asset.resource_spec.type_ = "BIGQUERY_DATASET"
-
-    request = dataplex_v1.UpdateAssetRequest(
-        asset=asset,
-    )
-
-    # Make the request
-    operation = client.update_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateAsset_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
deleted file mode 100644
index 57a672651f46..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_asset_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateAsset
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateAsset_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_asset():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    asset = dataplex_v1.Asset()
-    asset.resource_spec.type_ = "BIGQUERY_DATASET"
-
-    request = dataplex_v1.UpdateAssetRequest(
-        asset=asset,
-    )
-
-    # Make the request
-    operation = client.update_asset(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateAsset_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py
deleted file mode 100644
index 07d1d4985472..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    environment = dataplex_v1.Environment()
-    environment.infrastructure_spec.os_image.image_version = "image_version_value"
-
-    request = dataplex_v1.UpdateEnvironmentRequest(
-        environment=environment,
-    )
-
-    # Make the request
-    operation = client.update_environment(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py
deleted file mode 100644
index b77bfdcee105..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_environment_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateEnvironment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateEnvironment_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_environment():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    environment = dataplex_v1.Environment()
-    environment.infrastructure_spec.os_image.image_version = "image_version_value"
-
-    request = dataplex_v1.UpdateEnvironmentRequest(
-        environment=environment,
-    )
-
-    # Make the request
-    operation = client.update_environment(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateEnvironment_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py
deleted file mode 100644
index 3d049c83d769..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateLake_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateLakeRequest(
-    )
-
-    # Make the request
-    operation = client.update_lake(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateLake_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
deleted file mode 100644
index 334145584703..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_lake_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateLake
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateLake_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_lake():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.UpdateLakeRequest(
-    )
-
-    # Make the request
-    operation = client.update_lake(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateLake_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py
deleted file mode 100644
index 1f02d2952d62..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateTask_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    task = dataplex_v1.Task()
-    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
-    task.trigger_spec.schedule = "schedule_value"
-    task.trigger_spec.type_ = "RECURRING"
-    task.execution_spec.service_account = "service_account_value"
-
-    request = dataplex_v1.UpdateTaskRequest(
-        task=task,
-    )
-
-    # Make the request
-    operation = client.update_task(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateTask_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
deleted file mode 100644
index a7afc9397dc0..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_task_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateTask
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateTask_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_task():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    task = dataplex_v1.Task()
-    task.spark.main_jar_file_uri = "main_jar_file_uri_value"
-    task.trigger_spec.schedule = "schedule_value"
-    task.trigger_spec.type_ = "RECURRING"
-    task.execution_spec.service_account = "service_account_value"
-
-    request = dataplex_v1.UpdateTaskRequest(
-        task=task,
-    )
-
-    # Make the request
-    operation = client.update_task(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateTask_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py
deleted file mode 100644
index 80f26b89522d..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_async.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateZone_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_update_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceAsyncClient()
-
-    # Initialize request argument(s)
-    zone = dataplex_v1.Zone()
-    zone.type_ = "CURATED"
-    zone.resource_spec.location_type = "MULTI_REGION"
-
-    request = dataplex_v1.UpdateZoneRequest(
-        zone=zone,
-    )
-
-    # Make the request
-    operation = client.update_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateZone_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
deleted file mode 100644
index ec2683f4dc9f..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_dataplex_service_update_zone_sync.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateZone
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_DataplexService_UpdateZone_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_update_zone():
-    # Create a client
-    client = dataplex_v1.DataplexServiceClient()
-
-    # Initialize request argument(s)
-    zone = dataplex_v1.Zone()
-    zone.type_ = "CURATED"
-    zone.resource_spec.location_type = "MULTI_REGION"
-
-    request = dataplex_v1.UpdateZoneRequest(
-        zone=zone,
-    )
-
-    # Make the request
-    operation = client.update_zone(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_DataplexService_UpdateZone_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py
deleted file mode 100644
index 0464145a93b4..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_CreateEntity_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceAsyncClient()
-
-    # Initialize request argument(s)
-    entity = dataplex_v1.Entity()
-    entity.id = "id_value"
-    entity.type_ = "FILESET"
-    entity.asset = "asset_value"
-    entity.data_path = "data_path_value"
-    entity.system = "BIGQUERY"
-    entity.format_.mime_type = "mime_type_value"
-    entity.schema.user_managed = True
-
-    request = dataplex_v1.CreateEntityRequest(
-        parent="parent_value",
-        entity=entity,
-    )
-
-    # Make the request
-    response = await client.create_entity(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_CreateEntity_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py
deleted file mode 100644
index fc9b2a1e1368..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_entity_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_CreateEntity_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceClient()
-
-    # Initialize request argument(s)
-    entity = dataplex_v1.Entity()
-    entity.id = "id_value"
-    entity.type_ = "FILESET"
-    entity.asset = "asset_value"
-    entity.data_path = "data_path_value"
-    entity.system = "BIGQUERY"
-    entity.format_.mime_type = "mime_type_value"
-    entity.schema.user_managed = True
-
-    request = dataplex_v1.CreateEntityRequest(
-        parent="parent_value",
-        entity=entity,
-    )
-
-    # Make the request
-    response = client.create_entity(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_CreateEntity_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py
deleted file mode 100644
index 9ac3bd423a53..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreatePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_CreatePartition_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_create_partition():
-    # Create a client
-    client = dataplex_v1.MetadataServiceAsyncClient()
-
-    # Initialize request argument(s)
-    partition = dataplex_v1.Partition()
-    partition.values = ['values_value1', 'values_value2']
-    partition.location = "location_value"
-
-    request = dataplex_v1.CreatePartitionRequest(
-        parent="parent_value",
-        partition=partition,
-    )
-
-    # Make the request
-    response = await client.create_partition(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_CreatePartition_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py
deleted file mode 100644
index 68759f52656c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_create_partition_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreatePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_CreatePartition_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_create_partition():
-    # Create a client
-    client = dataplex_v1.MetadataServiceClient()
-
-    # Initialize request argument(s)
-    partition = dataplex_v1.Partition()
-    partition.values = ['values_value1', 'values_value2']
-    partition.location = "location_value"
-
-    request = dataplex_v1.CreatePartitionRequest(
-        parent="parent_value",
-        partition=partition,
-    )
-
-    # Make the request
-    response = client.create_partition(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_CreatePartition_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py
deleted file mode 100644
index 0c1ddc783b69..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_async.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_DeleteEntity_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntityRequest(
-        name="name_value",
-        etag="etag_value",
-    )
-
-    # Make the request
-    await client.delete_entity(request=request)
-
-
-# [END dataplex_v1_generated_MetadataService_DeleteEntity_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py
deleted file mode 100644
index eda8649ce8c9..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_entity_sync.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_DeleteEntity_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeleteEntityRequest(
-        name="name_value",
-        etag="etag_value",
-    )
-
-    # Make the request
-    client.delete_entity(request=request)
-
-
-# [END dataplex_v1_generated_MetadataService_DeleteEntity_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py
deleted file mode 100644
index 3b92c18589ef..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeletePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_DeletePartition_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_delete_partition():
-    # Create a client
-    client = dataplex_v1.MetadataServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeletePartitionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_partition(request=request)
-
-
-# [END dataplex_v1_generated_MetadataService_DeletePartition_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py
deleted file mode 100644
index 0caa639adb6c..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_delete_partition_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeletePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_DeletePartition_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_delete_partition():
-    # Create a client
-    client = dataplex_v1.MetadataServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.DeletePartitionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_partition(request=request)
-
-
-# [END dataplex_v1_generated_MetadataService_DeletePartition_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py
deleted file mode 100644
index 8c6bfd66e0c2..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_GetEntity_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-async def sample_get_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntityRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_entity(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_GetEntity_async]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py
deleted file mode 100644
index 91400bfc9e7d..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_entity_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetEntity
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_GetEntity_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataplex_v1
-
-
-def sample_get_entity():
-    # Create a client
-    client = dataplex_v1.MetadataServiceClient()
-
-    # Initialize request argument(s)
-    request = dataplex_v1.GetEntityRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_entity(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataplex_v1_generated_MetadataService_GetEntity_sync]
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py
deleted file mode 100644
index bdd927959bcb..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetPartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataplex
-
-
-# [START dataplex_v1_generated_MetadataService_GetPartition_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetPartition_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py deleted file mode 100644 index b5fb1a52ecf6..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_get_partition_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_GetPartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_get_partition(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.GetPartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_partition(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_GetPartition_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py deleted file mode 100644 index 39c288b6c43e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListEntities_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py deleted file mode 100644 index f03b686a369a..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_entities_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_entities(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListEntitiesRequest( - parent="parent_value", - view="FILESETS", - ) - - # Make the request - page_result = client.list_entities(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListEntities_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py deleted file mode 100644 index cbd82d4433a4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListPartitions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListPartitions_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py deleted file mode 100644 index 2854a45a94a1..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_list_partitions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_ListPartitions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_list_partitions(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - request = dataplex_v1.ListPartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_partitions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataplex_v1_generated_MetadataService_ListPartitions_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py deleted file mode 100644 index 6f6d1525a4c4..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_UpdateEntity_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -async def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceAsyncClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = await client.update_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_UpdateEntity_async] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py deleted file mode 100644 index fa71e34d815b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_metadata_service_update_entity_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntity -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataplex - - -# [START dataplex_v1_generated_MetadataService_UpdateEntity_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataplex_v1 - - -def sample_update_entity(): - # Create a client - client = dataplex_v1.MetadataServiceClient() - - # Initialize request argument(s) - entity = dataplex_v1.Entity() - entity.id = "id_value" - entity.type_ = "FILESET" - entity.asset = "asset_value" - entity.data_path = "data_path_value" - entity.system = "BIGQUERY" - entity.format_.mime_type = "mime_type_value" - entity.schema.user_managed = True - - request = dataplex_v1.UpdateEntityRequest( - entity=entity, - ) - - # Make the request - response = client.update_entity(request=request) - - # Handle the response - print(response) - -# [END dataplex_v1_generated_MetadataService_UpdateEntity_sync] diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json deleted file mode 100644 index a12a3f36de1e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ /dev/null @@ -1,20224 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.dataplex.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-dataplex", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "category", - "type": "google.cloud.dataplex_v1.types.GlossaryCategory" - }, - { - "name": "category_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "create_glossary_category" - }, - "description": "Sample for CreateGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "category", - "type": "google.cloud.dataplex_v1.types.GlossaryCategory" - }, - { - "name": "category_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "create_glossary_category" - }, - "description": "Sample for CreateGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "term", - "type": "google.cloud.dataplex_v1.types.GlossaryTerm" - }, - { - "name": "term_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "create_glossary_term" - }, - "description": "Sample for CreateGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - 
}, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "term", - "type": "google.cloud.dataplex_v1.types.GlossaryTerm" - }, - { - "name": "term_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "create_glossary_term" - }, - "description": "Sample for CreateGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "glossary", - "type": "google.cloud.dataplex_v1.types.Glossary" - }, - { - "name": "glossary_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_glossary" - }, - "description": "Sample for CreateGlossary", - "file": 
"dataplex_v1_generated_business_glossary_service_create_glossary_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "CreateGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "glossary", - "type": "google.cloud.dataplex_v1.types.Glossary" - }, - { - "name": "glossary_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_glossary" - }, - "description": "Sample for CreateGlossary", - "file": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"shortName": "delete_glossary_category" - }, - "description": "Sample for DeleteGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_glossary_category" - }, - "description": "Sample for DeleteGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_glossary_term" - }, - 
"description": "Sample for DeleteGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_glossary_term" - }, - "description": "Sample for DeleteGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_glossary" - }, - "description": "Sample for DeleteGlossary", - "file": 
"dataplex_v1_generated_business_glossary_service_delete_glossary_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "DeleteGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_glossary" - }, - "description": "Sample for DeleteGlossary", - "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "get_glossary_category" - }, - "description": "Sample for 
GetGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "get_glossary_category" - }, - "description": "Sample for GetGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "get_glossary_term" - }, - "description": "Sample for GetGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "get_glossary_term" - }, - "description": "Sample for GetGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.types.Glossary", - "shortName": "get_glossary" - }, - "description": "Sample for GetGlossary", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "GetGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Glossary", - "shortName": "get_glossary" - }, - "description": "Sample for GetGlossary", - "file": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossaries", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager", - "shortName": "list_glossaries" - }, - "description": "Sample for ListGlossaries", - "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager", - "shortName": "list_glossaries" - }, - "description": "Sample for ListGlossaries", - "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_categories", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaryCategories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager", - "shortName": "list_glossary_categories" - }, - "description": "Sample for ListGlossaryCategories", - "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaryCategories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager", - "shortName": "list_glossary_categories" - }, - "description": "Sample for ListGlossaryCategories", - "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_terms", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaryTerms" - }, - "parameters": [ - { 
- "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager", - "shortName": "list_glossary_terms" - }, - "description": "Sample for ListGlossaryTerms", - "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "ListGlossaryTerms" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager", - "shortName": "list_glossary_terms" - }, - "description": "Sample for ListGlossaryTerms", - "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", - "service": { - 
"fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" - }, - { - "name": "category", - "type": "google.cloud.dataplex_v1.types.GlossaryCategory" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "update_glossary_category" - }, - "description": "Sample for UpdateGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossaryCategory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" - }, - { - "name": "category", - "type": "google.cloud.dataplex_v1.types.GlossaryCategory" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", - "shortName": "update_glossary_category" - }, - "description": "Sample for UpdateGlossaryCategory", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" - }, - { - "name": "term", - "type": "google.cloud.dataplex_v1.types.GlossaryTerm" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "update_glossary_term" - }, - "description": "Sample for UpdateGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossaryTerm" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" - }, - { - "name": "term", - "type": "google.cloud.dataplex_v1.types.GlossaryTerm" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", - "shortName": "update_glossary_term" - }, - "description": "Sample for UpdateGlossaryTerm", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", - "shortName": "BusinessGlossaryServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" - }, - { - "name": "glossary", - "type": "google.cloud.dataplex_v1.types.Glossary" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_glossary" - }, - "description": "Sample for UpdateGlossary", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", - "shortName": "BusinessGlossaryServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", - "method": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", - "service": { - "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "shortName": "BusinessGlossaryService" - }, - "shortName": "UpdateGlossary" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" - }, - { - "name": "glossary", - "type": "google.cloud.dataplex_v1.types.Glossary" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_glossary" - }, - "description": "Sample for UpdateGlossary", - "file": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CancelMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_metadata_job" - }, - "description": "Sample for CancelMetadataJob", - "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CancelMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_metadata_job" - }, - "description": "Sample for CancelMetadataJob", - "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "aspect_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_aspect_type" - }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "aspect_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_aspect_type" - }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", - "segments": [ - { - "end": 61, - "start": 
27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_link", - "type": "google.cloud.dataplex_v1.types.EntryLink" - }, - { - "name": "entry_link_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "create_entry_link" - }, - "description": "Sample for CreateEntryLink", - "file": "dataplex_v1_generated_catalog_service_create_entry_link_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_link_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_link", - "type": "google.cloud.dataplex_v1.types.EntryLink" - }, - { - "name": "entry_link_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "create_entry_link" - }, - "description": 
"Sample for CreateEntryLink", - "file": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "entry_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_type" - }, - "description": "Sample for CreateEntryType", - "file": "dataplex_v1_generated_catalog_service_create_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryTypeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "entry_type_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_type" - }, - "description": "Sample for CreateEntryType", - "file": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryType_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "dataplex_v1_generated_catalog_service_create_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "dataplex_v1_generated_catalog_service_create_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntry_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" - }, - { - "name": "metadata_job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_metadata_job" - }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "CreateMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" - }, - { - "name": "metadata_job_id", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_metadata_job" - }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_aspect_type" - }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_aspect_type" - }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - 
"shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "delete_entry_link" - }, - "description": "Sample for DeleteEntryLink", - "file": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "delete_entry_link" - }, - "description": "Sample for DeleteEntryLink", - "file": 
"dataplex_v1_generated_catalog_service_delete_entry_link_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_type" - }, - "description": "Sample for DeleteEntryType", - "file": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryType_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_entry_type" - }, - "description": "Sample for DeleteEntryType", - "file": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_CatalogService_DeleteEntryType_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "dataplex_v1_generated_catalog_service_delete_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "dataplex_v1_generated_catalog_service_delete_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" - }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" - }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - 
"start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": 
true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "get_entry_link" - }, - "description": "Sample for GetEntryLink", - "file": "dataplex_v1_generated_catalog_service_get_entry_link_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_link_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryLink" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryLink", - "shortName": "get_entry_link" - }, - "description": "Sample for GetEntryLink", - "file": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": 
"google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" - }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" - }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", - "service": { - "fullName": 
"google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.MetadataJob", - "shortName": "get_metadata_job" - }, - "description": "Sample for GetMetadataJob", - "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "GetMetadataJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.MetadataJob", - "shortName": "get_metadata_job" - }, - "description": "Sample for GetMetadataJob", - "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_aspect_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListAspectTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager", - "shortName": "list_aspect_types" - }, - "description": "Sample for ListAspectTypes", - "file": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_aspect_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_aspect_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListAspectTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListAspectTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAspectTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager", - "shortName": "list_aspect_types" - }, - "description": "Sample for ListAspectTypes", - "file": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListAspectTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_aspect_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "dataplex_v1_generated_catalog_service_list_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "dataplex_v1_generated_catalog_service_list_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntries_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_groups", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager", - "shortName": "list_entry_groups" - }, - "description": 
"Sample for ListEntryGroups", - "file": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_groups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_groups", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryGroups", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryGroups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_groups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_entry_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager", - "shortName": "list_entry_types" - }, - "description": "Sample for ListEntryTypes", - "file": 
"dataplex_v1_generated_catalog_service_list_entry_types_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_types_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_entry_types", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListEntryTypes", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListEntryTypes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntryTypesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager", - "shortName": "list_entry_types" - }, - "description": "Sample for ListEntryTypes", - "file": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListEntryTypes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListMetadataJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", - "shortName": "list_metadata_jobs" - }, - "description": "Sample for ListMetadataJobs", - "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "ListMetadataJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", - "shortName": "list_metadata_jobs" - }, - "description": "Sample for ListMetadataJobs", - "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.lookup_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "dataplex_v1_generated_catalog_service_lookup_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": 
"FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_lookup_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.lookup_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.LookupEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_LookupEntry_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_lookup_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.search_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "SearchEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesAsyncPager", - "shortName": "search_entries" - }, - "description": "Sample for SearchEntries", - "file": "dataplex_v1_generated_catalog_service_search_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_search_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.search_entries", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.SearchEntries", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "SearchEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.SearchEntriesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.SearchEntriesPager", - "shortName": "search_entries" - }, - "description": "Sample for SearchEntries", - "file": "dataplex_v1_generated_catalog_service_search_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_SearchEntries_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_search_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_aspect_type" - }, - "description": "Sample for UpdateAspectType", - "file": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_aspect_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_aspect_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateAspectType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateAspectType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAspectTypeRequest" - }, - { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_aspect_type" - }, - "description": "Sample for UpdateAspectType", - "file": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateAspectType_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_aspect_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_update_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" 
- }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_group", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryGroup_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_entry_type" - }, - "description": "Sample for UpdateEntryType", - "file": "dataplex_v1_generated_catalog_service_update_entry_type_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_CatalogService_UpdateEntryType_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_type_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry_type", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntryType", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntryType" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryTypeRequest" - }, - { - "name": "entry_type", - "type": "google.cloud.dataplex_v1.types.EntryType" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_entry_type" - }, - "description": "Sample for UpdateEntryType", - "file": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntryType_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_type_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", - "shortName": "CatalogServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.update_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "dataplex_v1_generated_catalog_service_update_entry_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", - "shortName": "CatalogServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.update_entry", - "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.UpdateEntry", - "service": { - "fullName": "google.cloud.dataplex.v1.CatalogService", - "shortName": "CatalogService" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.dataplex_v1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "dataplex_v1_generated_catalog_service_update_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_UpdateEntry_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_catalog_service_update_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", - "shortName": "CmekServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.create_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.CreateEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "CreateEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "encryption_config", - "type": "google.cloud.dataplex_v1.types.EncryptionConfig" - }, - { - "name": "encryption_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_encryption_config" - }, - "description": "Sample for 
CreateEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_create_encryption_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_CreateEncryptionConfig_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_create_encryption_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceClient", - "shortName": "CmekServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceClient.create_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.CreateEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "CreateEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEncryptionConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "encryption_config", - "type": "google.cloud.dataplex_v1.types.EncryptionConfig" - }, - { - "name": "encryption_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_encryption_config" - }, - "description": "Sample for CreateEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_create_encryption_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_CreateEncryptionConfig_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_create_encryption_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", - "shortName": "CmekServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.delete_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.DeleteEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "DeleteEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.api_core.operation_async.AsyncOperation", - "shortName": "delete_encryption_config" - }, - "description": "Sample for DeleteEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_delete_encryption_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_DeleteEncryptionConfig_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_delete_encryption_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceClient", - "shortName": "CmekServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceClient.delete_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.DeleteEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "DeleteEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEncryptionConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_encryption_config" - }, - "description": "Sample for DeleteEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_DeleteEncryptionConfig_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_delete_encryption_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", - "shortName": "CmekServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.get_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.GetEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "GetEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEncryptionConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EncryptionConfig", - "shortName": "get_encryption_config" - }, 
- "description": "Sample for GetEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_get_encryption_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_GetEncryptionConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_get_encryption_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceClient", - "shortName": "CmekServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceClient.get_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.GetEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "GetEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEncryptionConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.EncryptionConfig", - "shortName": "get_encryption_config" - }, - "description": "Sample for GetEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_get_encryption_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_GetEncryptionConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_get_encryption_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", - "shortName": "CmekServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.list_encryption_configs", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.ListEncryptionConfigs", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "ListEncryptionConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsAsyncPager", - "shortName": "list_encryption_configs" - }, - "description": "Sample for ListEncryptionConfigs", - "file": 
"dataplex_v1_generated_cmek_service_list_encryption_configs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_ListEncryptionConfigs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_list_encryption_configs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceClient", - "shortName": "CmekServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceClient.list_encryption_configs", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.ListEncryptionConfigs", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "ListEncryptionConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEncryptionConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.cmek_service.pagers.ListEncryptionConfigsPager", - "shortName": "list_encryption_configs" - }, - "description": "Sample for ListEncryptionConfigs", - "file": "dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_ListEncryptionConfigs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_list_encryption_configs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient", - "shortName": "CmekServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceAsyncClient.update_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.UpdateEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "UpdateEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest" - }, - { - "name": "encryption_config", - "type": "google.cloud.dataplex_v1.types.EncryptionConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": 
"update_encryption_config" - }, - "description": "Sample for UpdateEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_update_encryption_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_UpdateEncryptionConfig_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_update_encryption_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.CmekServiceClient", - "shortName": "CmekServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.CmekServiceClient.update_encryption_config", - "method": { - "fullName": "google.cloud.dataplex.v1.CmekService.UpdateEncryptionConfig", - "service": { - "fullName": "google.cloud.dataplex.v1.CmekService", - "shortName": "CmekService" - }, - "shortName": "UpdateEncryptionConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEncryptionConfigRequest" - }, - { - "name": "encryption_config", - "type": "google.cloud.dataplex_v1.types.EncryptionConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_encryption_config" - }, - "description": "Sample for UpdateEncryptionConfig", - "file": "dataplex_v1_generated_cmek_service_update_encryption_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CmekService_UpdateEncryptionConfig_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_cmek_service_update_encryption_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.create_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "CreateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "create_content" - }, - "description": "Sample for CreateContent", - "file": "dataplex_v1_generated_content_service_create_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_CreateContent_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_create_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.create_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.CreateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "CreateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "create_content" - }, - "description": "Sample for CreateContent", - "file": "dataplex_v1_generated_content_service_create_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_CreateContent_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_create_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.delete_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "DeleteContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_content" - }, - "description": "Sample for 
DeleteContent", - "file": "dataplex_v1_generated_content_service_delete_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_delete_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.delete_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.DeleteContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "DeleteContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_content" - }, - "description": "Sample for DeleteContent", - "file": "dataplex_v1_generated_content_service_delete_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_DeleteContent_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_delete_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "get_content" - }, - "description": "Sample for GetContent", - "file": "dataplex_v1_generated_content_service_get_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetContent_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - 
}, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetContentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "get_content" - }, - "description": "Sample for GetContent", - "file": "dataplex_v1_generated_content_service_get_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetContent_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "dataplex_v1_generated_content_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 
50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.GetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "dataplex_v1_generated_content_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.list_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "ListContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager", - "shortName": "list_content" - }, - "description": "Sample for ListContent", - "file": "dataplex_v1_generated_content_service_list_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_ListContent_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_list_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.list_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.ListContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "ListContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListContentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager", - "shortName": "list_content" - }, - "description": "Sample for ListContent", - "file": "dataplex_v1_generated_content_service_list_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_ListContent_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_list_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.SetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "dataplex_v1_generated_content_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.set_iam_policy", - "method": { - "fullName": 
"google.cloud.dataplex.v1.ContentService.SetIamPolicy", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "dataplex_v1_generated_content_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "dataplex_v1_generated_content_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.TestIamPermissions", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { 
- "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient", - "shortName": "ContentServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceAsyncClient.update_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "UpdateContent" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateContentRequest" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "update_content" - }, - "description": "Sample for UpdateContent", - "file": "dataplex_v1_generated_content_service_update_content_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_update_content_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.ContentServiceClient", - "shortName": "ContentServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.ContentServiceClient.update_content", - "method": { - "fullName": "google.cloud.dataplex.v1.ContentService.UpdateContent", - "service": { - "fullName": "google.cloud.dataplex.v1.ContentService", - "shortName": "ContentService" - }, - "shortName": "UpdateContent" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataplex_v1.types.UpdateContentRequest" - }, - { - "name": "content", - "type": "google.cloud.dataplex_v1.types.Content" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Content", - "shortName": "update_content" - }, - "description": "Sample for UpdateContent", - "file": "dataplex_v1_generated_content_service_update_content_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_ContentService_UpdateContent_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_content_service_update_content_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.create_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "CreateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "data_scan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_scan" - }, - "description": "Sample for CreateDataScan", - "file": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_create_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.create_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.CreateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": 
"DataScanService" - }, - "shortName": "CreateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataScanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "data_scan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_scan" - }, - "description": "Sample for CreateDataScan", - "file": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_CreateDataScan_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_create_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.delete_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "DeleteDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_scan" - }, - "description": "Sample for DeleteDataScan", - "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.delete_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.DeleteDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": 
"DataScanService" - }, - "shortName": "DeleteDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_scan" - }, - "description": "Sample for DeleteDataScan", - "file": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_DeleteDataScan_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_delete_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.generate_data_quality_rules", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GenerateDataQualityRules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", - "shortName": "generate_data_quality_rules" - }, - "description": "Sample for GenerateDataQualityRules", - "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.generate_data_quality_rules", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GenerateDataQualityRules", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" 
- }, - "shortName": "GenerateDataQualityRules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.GenerateDataQualityRulesResponse", - "shortName": "generate_data_quality_rules" - }, - "description": "Sample for GenerateDataQualityRules", - "file": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GenerateDataQualityRules_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_generate_data_quality_rules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScanJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScanJob", - "shortName": "get_data_scan_job" - }, - "description": "Sample for GetDataScanJob", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScanJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScanJob" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScanJob", - "shortName": "get_data_scan_job" - }, - "description": "Sample for GetDataScanJob", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScanJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.get_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScan", - "shortName": "get_data_scan" - }, - "description": "Sample for GetDataScan", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.get_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.GetDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "GetDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataScan", - "shortName": "get_data_scan" - }, - "description": "Sample for GetDataScan", - "file": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_GetDataScan_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_get_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scan_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScanJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager", - "shortName": "list_data_scan_jobs" - }, - "description": "Sample for ListDataScanJobs", - "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scan_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScanJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScanJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScanJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { 
- "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager", - "shortName": "list_data_scan_jobs" - }, - "description": "Sample for ListDataScanJobs", - "file": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScanJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scan_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.list_data_scans", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager", - "shortName": "list_data_scans" - }, - "description": "Sample for ListDataScans", - "file": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scans_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.list_data_scans", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.ListDataScans", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "ListDataScans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataScansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager", - "shortName": "list_data_scans" - }, - "description": "Sample for ListDataScans", - "file": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_ListDataScans_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_list_data_scans_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.run_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "RunDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", - "shortName": "run_data_scan" - }, - "description": "Sample for RunDataScan", - "file": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_run_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.run_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.RunDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "RunDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunDataScanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunDataScanResponse", - "shortName": "run_data_scan" - }, - "description": 
"Sample for RunDataScan", - "file": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_RunDataScan_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_run_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient", - "shortName": "DataScanServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceAsyncClient.update_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "UpdateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_scan" - }, - "description": "Sample for UpdateDataScan", - "file": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_update_data_scan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient", - "shortName": "DataScanServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataScanServiceClient.update_data_scan", - "method": { - "fullName": "google.cloud.dataplex.v1.DataScanService.UpdateDataScan", - "service": { - "fullName": "google.cloud.dataplex.v1.DataScanService", - "shortName": "DataScanService" - }, - "shortName": "UpdateDataScan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataScanRequest" - }, - { - "name": "data_scan", - "type": "google.cloud.dataplex_v1.types.DataScan" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.api_core.operation.Operation", - "shortName": "update_data_scan" - }, - "description": "Sample for UpdateDataScan", - "file": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataScanService_UpdateDataScan_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_scan_service_update_data_scan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "data_attribute_binding_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_attribute_binding" - }, - "description": "Sample for CreateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeBindingRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "data_attribute_binding_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_attribute_binding" - }, - "description": "Sample for CreateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttributeBinding_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "data_attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_attribute" - }, - "description": "Sample for CreateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_attribute", - "method": 
{ - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "data_attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_attribute" - }, - "description": "Sample for CreateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataAttribute_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.create_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "data_taxonomy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_data_taxonomy" - }, - "description": "Sample for CreateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.create_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.CreateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "CreateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateDataTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "data_taxonomy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_data_taxonomy" - }, - "description": "Sample for CreateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_CreateDataTaxonomy_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_create_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_attribute_binding" - }, - "description": "Sample for DeleteDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - 
"end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_attribute_binding" - }, - "description": "Sample for DeleteDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttributeBinding_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_attribute" - }, - "description": "Sample for DeleteDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_attribute" - }, - "description": "Sample for DeleteDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataAttribute_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.delete_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_data_taxonomy" - }, - "description": "Sample for DeleteDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.delete_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.DeleteDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "DeleteDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_data_taxonomy" - }, - "description": "Sample for DeleteDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_DeleteDataTaxonomy_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_delete_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", - "shortName": "get_data_attribute_binding" - }, - "description": "Sample for GetDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": 
"FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeBindingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttributeBinding", - "shortName": "get_data_attribute_binding" - }, - "description": "Sample for GetDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttributeBinding_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttribute", - "shortName": "get_data_attribute" - }, - "description": "Sample for GetDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_async", - 
"segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataAttribute", - "shortName": "get_data_attribute" - }, - "description": "Sample for GetDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataAttribute_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.get_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", - "shortName": "get_data_taxonomy" - }, - "description": "Sample for GetDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_async", - "segments": [ - { - "end": 51, - "start": 
27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.get_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.GetDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "GetDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetDataTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.DataTaxonomy", - "shortName": "get_data_taxonomy" - }, - "description": "Sample for GetDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_GetDataTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_get_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attribute_bindings", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributeBindings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager", - "shortName": "list_data_attribute_bindings" - }, - "description": "Sample for ListDataAttributeBindings", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attribute_bindings", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributeBindings", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributeBindings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributeBindingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager", - "shortName": "list_data_attribute_bindings" - }, - "description": "Sample for ListDataAttributeBindings", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributeBindings_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attribute_bindings_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_attributes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager", - "shortName": "list_data_attributes" - }, - "description": 
"Sample for ListDataAttributes", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_attributes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataAttributes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager", - "shortName": "list_data_attributes" - }, - "description": "Sample for ListDataAttributes", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataAttributes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_attributes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.list_data_taxonomies", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager", - "shortName": "list_data_taxonomies" - }, - "description": "Sample for ListDataTaxonomies", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.list_data_taxonomies", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.ListDataTaxonomies", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "ListDataTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListDataTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager", - "shortName": "list_data_taxonomies" - }, - "description": "Sample for ListDataTaxonomies", - "file": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_ListDataTaxonomies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_list_data_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "update_mask", 
- "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_attribute_binding" - }, - "description": "Sample for UpdateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute_binding", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttributeBinding", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttributeBinding" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeBindingRequest" - }, - { - "name": "data_attribute_binding", - "type": "google.cloud.dataplex_v1.types.DataAttributeBinding" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_attribute_binding" - }, - "description": "Sample for UpdateDataAttributeBinding", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttributeBinding_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_binding_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_attribute", - "method": { - "fullName": 
"google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_attribute" - }, - "description": "Sample for UpdateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_attribute", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataAttribute", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataAttributeRequest" - }, - { - "name": "data_attribute", - "type": "google.cloud.dataplex_v1.types.DataAttribute" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_attribute" - }, - "description": "Sample for UpdateDataAttribute", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataAttribute_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_attribute_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient", - "shortName": "DataTaxonomyServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceAsyncClient.update_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_taxonomy" - }, - "description": "Sample for UpdateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient", - "shortName": "DataTaxonomyServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataTaxonomyServiceClient.update_data_taxonomy", - "method": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService.UpdateDataTaxonomy", - "service": { - "fullName": "google.cloud.dataplex.v1.DataTaxonomyService", - "shortName": "DataTaxonomyService" - }, - "shortName": "UpdateDataTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateDataTaxonomyRequest" - }, - { - "name": "data_taxonomy", - "type": "google.cloud.dataplex_v1.types.DataTaxonomy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_taxonomy" - }, - "description": "Sample for UpdateDataTaxonomy", - "file": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataTaxonomyService_UpdateDataTaxonomy_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_data_taxonomy_service_update_data_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataplex_v1_generated_dataplex_service_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_cancel_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.cancel_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CancelJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CancelJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": 
"DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "asset_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_asset" - }, - "description": "Sample for CreateAsset", - "file": "dataplex_v1_generated_dataplex_service_create_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAssetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "asset_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_asset" - }, - "description": "Sample for CreateAsset", - "file": "dataplex_v1_generated_dataplex_service_create_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateAsset_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "environment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_environment" - }, - "description": "Sample for CreateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_create_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEnvironmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "environment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_environment" - }, - "description": "Sample for CreateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_create_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateEnvironment_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } 
- ], - "title": "dataplex_v1_generated_dataplex_service_create_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "lake_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_lake" - }, - "description": "Sample for CreateLake", - "file": "dataplex_v1_generated_dataplex_service_create_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateLakeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "lake_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_lake" - }, - "description": "Sample for CreateLake", - "file": "dataplex_v1_generated_dataplex_service_create_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateLake_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_task" - }, - "description": "Sample for CreateTask", - "file": "dataplex_v1_generated_dataplex_service_create_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateTaskRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_task" - }, - "description": "Sample for CreateTask", - "file": "dataplex_v1_generated_dataplex_service_create_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateTask_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 
61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.create_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "zone_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_zone" - }, - "description": "Sample for CreateZone", - "file": "dataplex_v1_generated_dataplex_service_create_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.create_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.CreateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "CreateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateZoneRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "zone_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_zone" - }, - "description": "Sample for CreateZone", - "file": "dataplex_v1_generated_dataplex_service_create_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_CreateZone_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, 
- "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_create_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_asset" - }, - "description": "Sample for DeleteAsset", - "file": "dataplex_v1_generated_dataplex_service_delete_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_asset" - }, - "description": "Sample for DeleteAsset", - "file": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteAsset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_environment" - }, - "description": "Sample for DeleteEnvironment", - "file": "dataplex_v1_generated_dataplex_service_delete_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_environment" - }, - "description": "Sample for DeleteEnvironment", - "file": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteEnvironment_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": 
"DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_lake" - }, - "description": "Sample for DeleteLake", - "file": "dataplex_v1_generated_dataplex_service_delete_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_lake" - }, - "description": "Sample for DeleteLake", - "file": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteLake_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", - "service": { - "fullName": 
"google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_task" - }, - "description": "Sample for DeleteTask", - "file": "dataplex_v1_generated_dataplex_service_delete_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_task" - }, - "description": "Sample for DeleteTask", - "file": "dataplex_v1_generated_dataplex_service_delete_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteTask_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.delete_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" - }, - { - 
"name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_zone" - }, - "description": "Sample for DeleteZone", - "file": "dataplex_v1_generated_dataplex_service_delete_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.delete_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.DeleteZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "DeleteZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_zone" - }, - "description": "Sample for DeleteZone", - "file": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_DeleteZone_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_delete_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.dataplex_v1.types.Asset", - "shortName": "get_asset" - }, - "description": "Sample for GetAsset", - "file": "dataplex_v1_generated_dataplex_service_get_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAssetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Asset", - "shortName": "get_asset" - }, - "description": "Sample for GetAsset", - "file": "dataplex_v1_generated_dataplex_service_get_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetAsset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Environment", - "shortName": "get_environment" - }, - "description": "Sample for GetEnvironment", - "file": "dataplex_v1_generated_dataplex_service_get_environment_async.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEnvironmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Environment", - "shortName": "get_environment" - }, - "description": "Sample for GetEnvironment", - "file": "dataplex_v1_generated_dataplex_service_get_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetEnvironment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_environment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataplex_v1_generated_dataplex_service_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - 
{ - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_job", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetJob", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataplex_v1_generated_dataplex_service_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Lake", - "shortName": "get_lake" - }, - "description": "Sample for GetLake", - "file": "dataplex_v1_generated_dataplex_service_get_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetLake_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_get_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetLakeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Lake", - "shortName": "get_lake" - }, - "description": "Sample for GetLake", - "file": "dataplex_v1_generated_dataplex_service_get_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetLake_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_lake_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "dataplex_v1_generated_dataplex_service_get_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceClient.get_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "dataplex_v1_generated_dataplex_service_get_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.get_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Zone", - "shortName": "get_zone" - }, - "description": "Sample for GetZone", - "file": "dataplex_v1_generated_dataplex_service_get_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetZone_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.get_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.GetZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "GetZone" - }, - "parameters": [ - 
{ - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetZoneRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Zone", - "shortName": "get_zone" - }, - "description": "Sample for GetZone", - "file": "dataplex_v1_generated_dataplex_service_get_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_GetZone_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_get_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_asset_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssetActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager", - "shortName": "list_asset_actions" - }, - "description": "Sample for ListAssetActions", - "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_asset_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssetActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssetActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetActionsRequest" - }, - { - "name": "parent", - "type": "str" 
- }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager", - "shortName": "list_asset_actions" - }, - "description": "Sample for ListAssetActions", - "file": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssetActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_asset_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_assets", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "dataplex_v1_generated_dataplex_service_list_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_assets", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListAssets", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, 
- { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "dataplex_v1_generated_dataplex_service_list_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListAssets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_environments", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListEnvironments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager", - "shortName": "list_environments" - }, - "description": "Sample for ListEnvironments", - "file": "dataplex_v1_generated_dataplex_service_list_environments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_environments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_environments", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListEnvironments", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListEnvironments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEnvironmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager", - "shortName": "list_environments" - }, - "description": "Sample for ListEnvironments", - "file": "dataplex_v1_generated_dataplex_service_list_environments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListEnvironments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_environments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataplex_v1_generated_dataplex_service_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListJobs", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - 
"description": "Sample for ListJobs", - "file": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lake_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakeActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager", - "shortName": "list_lake_actions" - }, - "description": "Sample for ListLakeActions", - "file": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lake_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakeActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakeActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakeActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager", - "shortName": "list_lake_actions" - }, - "description": "Sample for ListLakeActions", - "file": 
"dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakeActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lake_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_lakes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager", - "shortName": "list_lakes" - }, - "description": "Sample for ListLakes", - "file": "dataplex_v1_generated_dataplex_service_list_lakes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListLakes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lakes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_lakes", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListLakes", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListLakes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListLakesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager", - "shortName": "list_lakes" - }, - "description": "Sample for ListLakes", - "file": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_DataplexService_ListLakes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_lakes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_sessions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "dataplex_v1_generated_dataplex_service_list_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_sessions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListSessions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListSessionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - 
}, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_tasks", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "dataplex_v1_generated_dataplex_service_list_tasks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_tasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListTasks", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_tasks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zone_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZoneActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager", - "shortName": "list_zone_actions" - }, - "description": "Sample for ListZoneActions", - "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zone_actions", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZoneActions", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZoneActions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZoneActionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager", - "shortName": "list_zone_actions" - }, - "description": "Sample for ListZoneActions", - "file": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZoneActions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 
46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zone_actions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.list_zones", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZones" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZonesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager", - "shortName": "list_zones" - }, - "description": "Sample for ListZones", - "file": "dataplex_v1_generated_dataplex_service_list_zones_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZones_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_list_zones_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.list_zones", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.ListZones", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "ListZones" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListZonesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager", - "shortName": "list_zones" - }, - "description": "Sample for ListZones", - "file": "dataplex_v1_generated_dataplex_service_list_zones_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_ListZones_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_list_zones_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.run_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "RunTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", - "shortName": "run_task" - }, - "description": "Sample for RunTask", - "file": "dataplex_v1_generated_dataplex_service_run_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_RunTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_run_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.run_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.RunTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "RunTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.RunTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.RunTaskResponse", - "shortName": "run_task" - }, - "description": "Sample for RunTask", - "file": "dataplex_v1_generated_dataplex_service_run_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_RunTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_run_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": 
"google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_asset" - }, - "description": "Sample for UpdateAsset", - "file": "dataplex_v1_generated_dataplex_service_update_asset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_asset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_asset", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateAsset", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateAsset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateAssetRequest" - }, - { - "name": "asset", - "type": "google.cloud.dataplex_v1.types.Asset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_asset" - }, - "description": "Sample for UpdateAsset", - "file": "dataplex_v1_generated_dataplex_service_update_asset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateAsset_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_asset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": 
"DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_environment" - }, - "description": "Sample for UpdateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_update_environment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_environment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_environment", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateEnvironment", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateEnvironment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEnvironmentRequest" - }, - { - "name": "environment", - "type": "google.cloud.dataplex_v1.types.Environment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_environment" - }, - "description": "Sample for UpdateEnvironment", - "file": "dataplex_v1_generated_dataplex_service_update_environment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateEnvironment_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_environment_sync.py" - }, - 
{ - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_lake" - }, - "description": "Sample for UpdateLake", - "file": "dataplex_v1_generated_dataplex_service_update_lake_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_lake_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_lake", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateLake", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateLake" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateLakeRequest" - }, - { - "name": "lake", - "type": "google.cloud.dataplex_v1.types.Lake" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_lake" - }, - "description": "Sample for UpdateLake", - "file": "dataplex_v1_generated_dataplex_service_update_lake_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateLake_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_lake_sync.py" - 
}, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_task" - }, - "description": "Sample for UpdateTask", - "file": "dataplex_v1_generated_dataplex_service_update_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_task", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateTask", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateTaskRequest" - }, - { - "name": "task", - "type": "google.cloud.dataplex_v1.types.Task" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_task" - }, - "description": "Sample for UpdateTask", - "file": "dataplex_v1_generated_dataplex_service_update_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateTask_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_dataplex_service_update_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient", - "shortName": "DataplexServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceAsyncClient.update_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_zone" - }, - "description": "Sample for UpdateZone", - "file": "dataplex_v1_generated_dataplex_service_update_zone_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_dataplex_service_update_zone_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient", - "shortName": "DataplexServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.DataplexServiceClient.update_zone", - "method": { - "fullName": "google.cloud.dataplex.v1.DataplexService.UpdateZone", - "service": { - "fullName": "google.cloud.dataplex.v1.DataplexService", - "shortName": "DataplexService" - }, - "shortName": "UpdateZone" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateZoneRequest" - }, - { - "name": "zone", - "type": "google.cloud.dataplex_v1.types.Zone" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_zone" - }, - "description": "Sample for UpdateZone", - "file": "dataplex_v1_generated_dataplex_service_update_zone_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_DataplexService_UpdateZone_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "dataplex_v1_generated_dataplex_service_update_zone_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entity", - "type": "google.cloud.dataplex_v1.types.Entity" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "create_entity" - }, - "description": "Sample for CreateEntity", - "file": "dataplex_v1_generated_metadata_service_create_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_create_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntityRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entity", - "type": "google.cloud.dataplex_v1.types.Entity" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "create_entity" - }, - "description": "Sample for CreateEntity", - "file": "dataplex_v1_generated_metadata_service_create_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreateEntity_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataplex_v1_generated_metadata_service_create_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.create_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreatePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "partition", - "type": "google.cloud.dataplex_v1.types.Partition" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "create_partition" - }, - "description": "Sample for CreatePartition", - "file": "dataplex_v1_generated_metadata_service_create_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_create_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.create_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.CreatePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "CreatePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.CreatePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "partition", - "type": "google.cloud.dataplex_v1.types.Partition" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "create_partition" - }, - "description": "Sample for CreatePartition", - "file": "dataplex_v1_generated_metadata_service_create_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_CreatePartition_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_create_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeleteEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_entity" - }, - "description": "Sample for DeleteEntity", - "file": "dataplex_v1_generated_metadata_service_delete_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeleteEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeleteEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_entity" - }, - "description": "Sample for DeleteEntity", - "file": "dataplex_v1_generated_metadata_service_delete_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeleteEntity_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": 
"google.cloud.dataplex_v1.MetadataServiceAsyncClient.delete_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeletePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_partition" - }, - "description": "Sample for DeletePartition", - "file": "dataplex_v1_generated_metadata_service_delete_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.delete_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.DeletePartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "DeletePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.DeletePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_partition" - }, - "description": "Sample for DeletePartition", - "file": "dataplex_v1_generated_metadata_service_delete_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_DeletePartition_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_delete_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetEntity" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "get_entity" - }, - "description": "Sample for GetEntity", - "file": "dataplex_v1_generated_metadata_service_get_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntityRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "get_entity" - }, - "description": "Sample for GetEntity", - "file": "dataplex_v1_generated_metadata_service_get_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetEntity_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_entity_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.get_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetPartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { 
- "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "get_partition" - }, - "description": "Sample for GetPartition", - "file": "dataplex_v1_generated_metadata_service_get_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.get_partition", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.GetPartition", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "GetPartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.GetPartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Partition", - "shortName": "get_partition" - }, - "description": "Sample for GetPartition", - "file": "dataplex_v1_generated_metadata_service_get_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_GetPartition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_get_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_entities", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager", - "shortName": 
"list_entities" - }, - "description": "Sample for ListEntities", - "file": "dataplex_v1_generated_metadata_service_list_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_entities", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListEntities", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListEntitiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager", - "shortName": "list_entities" - }, - "description": "Sample for ListEntities", - "file": "dataplex_v1_generated_metadata_service_list_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListEntities_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.list_partitions", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListPartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager", - "shortName": "list_partitions" - }, - "description": "Sample for ListPartitions", - "file": 
"dataplex_v1_generated_metadata_service_list_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.list_partitions", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.ListPartitions", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "ListPartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.ListPartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager", - "shortName": "list_partitions" - }, - "description": "Sample for ListPartitions", - "file": "dataplex_v1_generated_metadata_service_list_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_ListPartitions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_list_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient", - "shortName": "MetadataServiceAsyncClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceAsyncClient.update_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "UpdateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "update_entity" - }, - "description": "Sample for UpdateEntity", - "file": "dataplex_v1_generated_metadata_service_update_entity_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataplex_v1_generated_MetadataService_UpdateEntity_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_update_entity_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient", - "shortName": "MetadataServiceClient" - }, - "fullName": "google.cloud.dataplex_v1.MetadataServiceClient.update_entity", - "method": { - "fullName": "google.cloud.dataplex.v1.MetadataService.UpdateEntity", - "service": { - "fullName": "google.cloud.dataplex.v1.MetadataService", - "shortName": "MetadataService" - }, - "shortName": "UpdateEntity" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataplex_v1.types.UpdateEntityRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.dataplex_v1.types.Entity", - "shortName": "update_entity" - }, - "description": "Sample for UpdateEntity", - "file": "dataplex_v1_generated_metadata_service_update_entity_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_MetadataService_UpdateEntity_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 54, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 55, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataplex_v1_generated_metadata_service_update_entity_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py b/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py deleted file mode 100644 index e054db378114..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/scripts/fixup_dataplex_v1_keywords.py +++ /dev/null @@ -1,298 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dataplexCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('name', ), - 'cancel_metadata_job': ('name', ), - 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), - 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), - 'create_content': ('parent', 'content', 'validate_only', ), - 'create_data_attribute': ('parent', 'data_attribute_id', 'data_attribute', 'validate_only', ), - 'create_data_attribute_binding': ('parent', 'data_attribute_binding_id', 'data_attribute_binding', 'validate_only', ), - 'create_data_scan': ('parent', 'data_scan', 'data_scan_id', 'validate_only', ), - 'create_data_taxonomy': ('parent', 'data_taxonomy_id', 'data_taxonomy', 'validate_only', ), - 'create_encryption_config': ('parent', 'encryption_config_id', 'encryption_config', ), - 'create_entity': ('parent', 'entity', 'validate_only', ), - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), - 'create_entry_link': ('parent', 'entry_link_id', 'entry_link', ), - 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), - 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), - 'create_glossary': ('parent', 'glossary_id', 'glossary', 'validate_only', ), - 'create_glossary_category': ('parent', 'category_id', 'category', ), - 'create_glossary_term': ('parent', 'term_id', 'term', ), - 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), - 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), - 'create_partition': ('parent', 'partition', 'validate_only', ), - 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), - 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), - 'delete_aspect_type': ('name', 'etag', ), - 'delete_asset': ('name', ), - 'delete_content': ('name', ), - 'delete_data_attribute': ('name', 'etag', ), - 'delete_data_attribute_binding': ('name', 'etag', ), - 'delete_data_scan': ('name', 'force', ), - 'delete_data_taxonomy': ('name', 'etag', ), - 'delete_encryption_config': ('name', 'etag', ), - 'delete_entity': ('name', 'etag', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'etag', ), - 'delete_entry_link': ('name', ), - 'delete_entry_type': ('name', 'etag', ), - 'delete_environment': ('name', ), - 'delete_glossary': ('name', 'etag', ), - 'delete_glossary_category': ('name', ), - 'delete_glossary_term': ('name', ), - 'delete_lake': ('name', ), - 'delete_partition': ('name', 'etag', ), - 'delete_task': ('name', ), - 'delete_zone': ('name', ), - 'generate_data_quality_rules': ('name', ), - 'get_aspect_type': ('name', ), - 'get_asset': ('name', ), - 'get_content': ('name', 'view', ), - 'get_data_attribute': ('name', ), - 'get_data_attribute_binding': ('name', ), - 'get_data_scan': ('name', 'view', ), - 'get_data_scan_job': ('name', 'view', ), - 'get_data_taxonomy': ('name', 
), - 'get_encryption_config': ('name', ), - 'get_entity': ('name', 'view', ), - 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), - 'get_entry_group': ('name', ), - 'get_entry_link': ('name', ), - 'get_entry_type': ('name', ), - 'get_environment': ('name', ), - 'get_glossary': ('name', ), - 'get_glossary_category': ('name', ), - 'get_glossary_term': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_job': ('name', ), - 'get_lake': ('name', ), - 'get_metadata_job': ('name', ), - 'get_partition': ('name', ), - 'get_task': ('name', ), - 'get_zone': ('name', ), - 'list_aspect_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_asset_actions': ('parent', 'page_size', 'page_token', ), - 'list_assets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_content': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_data_attribute_bindings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_attributes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_scan_jobs': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_data_scans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_data_taxonomies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_encryption_configs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_entities': ('parent', 'view', 'page_size', 'page_token', 'filter', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_glossary_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_glossary_terms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_jobs': ('parent', 'page_size', 'page_token', ), - 'list_lake_actions': ('parent', 'page_size', 'page_token', ), - 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_zone_actions': ('parent', 'page_size', 'page_token', ), - 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'lookup_entry': ('name', 'entry', 'view', 'aspect_types', 'paths', ), - 'run_data_scan': ('name', ), - 'run_task': ('name', 'labels', 'args', ), - 'search_entries': ('name', 'query', 'page_size', 'page_token', 'order_by', 'scope', 'semantic_search', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_aspect_type': ('aspect_type', 'update_mask', 'validate_only', ), - 'update_asset': ('update_mask', 'asset', 'validate_only', ), - 'update_content': ('update_mask', 'content', 'validate_only', ), - 'update_data_attribute': ('update_mask', 'data_attribute', 'validate_only', ), - 'update_data_attribute_binding': ('update_mask', 'data_attribute_binding', 'validate_only', ), - 'update_data_scan': ('data_scan', 'update_mask', 
'validate_only', ), - 'update_data_taxonomy': ('update_mask', 'data_taxonomy', 'validate_only', ), - 'update_encryption_config': ('encryption_config', 'update_mask', ), - 'update_entity': ('entity', 'validate_only', ), - 'update_entry': ('entry', 'update_mask', 'allow_missing', 'delete_missing_aspects', 'aspect_keys', ), - 'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), - 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), - 'update_environment': ('update_mask', 'environment', 'validate_only', ), - 'update_glossary': ('glossary', 'update_mask', 'validate_only', ), - 'update_glossary_category': ('category', 'update_mask', ), - 'update_glossary_term': ('term', 'update_mask', ), - 'update_lake': ('update_mask', 'lake', 'validate_only', ), - 'update_task': ('update_mask', 'task', 'validate_only', ), - 'update_zone': ('update_mask', 'zone', 'validate_only', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dataplexCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dataplex client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/setup.py b/owl-bot-staging/google-cloud-dataplex/v1/setup.py deleted file mode 100644 index df5e1552f733..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
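The fixup script deleted above rewrites positional client calls into the single request-dict form by visiting every Call node with a libcst CSTTransformer and looking the method up in METHOD_TO_PARAMS. A minimal, self-contained sketch of that same pattern, shrunk to one hard-coded method; the method name get_entity and its ('name', 'view') parameter order are borrowed from the table above, everything else is illustrative:

    import libcst as cst

    class RequestifyGetEntity(cst.CSTTransformer):
        # Toy stand-in for METHOD_TO_PARAMS["get_entity"].
        PARAMS = ("name", "view")

        def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
            try:
                if original.func.attr.value != "get_entity":
                    return updated
            except AttributeError:
                # Not an attribute call (e.g. a bare foo(...)); leave it alone.
                return updated
            # Zip the positional args against the known parameter names
            # and fold them into one dict literal.
            elements = [
                cst.DictElement(cst.SimpleString(repr(name)), arg.value)
                for name, arg in zip(self.PARAMS, updated.args)
            ]
            request = cst.Arg(value=cst.Dict(elements), keyword=cst.Name("request"))
            return updated.with_changes(args=[request])

    src = "entity = client.get_entity(name, view)\n"
    print(cst.parse_module(src).visit(RequestifyGetEntity()).code)
    # -> entity = client.get_entity(request={'name': name, 'view': view})
    #    (modulo exact whitespace)

The real script additionally separates out the retry/timeout/metadata control parameters before building the dict, which is why it partitions the argument list first.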
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dataplex' - - -description = "Google Cloud Dataplex API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/dataplex/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
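One detail of the deleted setup.py worth calling out: the package version is not hardcoded but scraped out of google/cloud/dataplex/gapic_version.py with a lookbehind/lookahead regex, and the trove release classifier is then derived from whether the major version is still 0. A toy reproduction of that lookup; the file contents here are invented, and the dots are escaped (slightly tighter than the original pattern):

    import re

    gapic_version_py = '__version__ = "2.11.0"  # invented example contents'
    candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", gapic_version_py)
    assert len(candidates) == 1
    version = candidates[0]

    release_status = (
        "Development Status :: 4 - Beta"
        if version.startswith("0")
        else "Development Status :: 5 - Production/Stable"
    )
    print(version, release_status)
    # 2.11.0 Development Status :: 5 - Production/Stable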
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt deleted file mode 100644 index 2010e549cceb..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,12 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt deleted file mode 100644 index 56affbd9bd75..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py deleted file mode 100644 index 191773d5572d..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py deleted file mode 100644 index 1fc6dcca2c3b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ /dev/null @@ -1,24562 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceAsyncClient -from google.cloud.dataplex_v1.services.catalog_service import CatalogServiceClient -from google.cloud.dataplex_v1.services.catalog_service import pagers -from google.cloud.dataplex_v1.services.catalog_service import transports -from google.cloud.dataplex_v1.types import catalog -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore 
-from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CatalogServiceClient._get_default_mtls_endpoint(None) is None - assert CatalogServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CatalogServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CatalogServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - CatalogServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or 
`false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CatalogServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CatalogServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - CatalogServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CatalogServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert CatalogServiceClient._get_client_cert_source(None, False) is None - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert CatalogServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert CatalogServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert CatalogServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CatalogServiceClient.DEFAULT_MTLS_ENDPOINT - assert CatalogServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert CatalogServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - CatalogServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert 
str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert CatalogServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert CatalogServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert CatalogServiceClient._get_universe_domain(None, None) == CatalogServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - CatalogServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = CatalogServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = CatalogServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (CatalogServiceClient, "grpc"), - (CatalogServiceAsyncClient, "grpc_asyncio"), - (CatalogServiceClient, "rest"), -]) -def test_catalog_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CatalogServiceGrpcTransport, "grpc"), - (transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CatalogServiceRestTransport, "rest"), -]) -def test_catalog_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CatalogServiceClient, "grpc"), - (CatalogServiceAsyncClient, "grpc_asyncio"), - (CatalogServiceClient, "rest"), -]) -def test_catalog_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_catalog_service_client_get_transport_class(): - transport = CatalogServiceClient.get_transport_class() - available_transports = [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceRestTransport, - ] - assert transport in available_transports - - transport = CatalogServiceClient.get_transport_class("grpc") - assert transport == transports.CatalogServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest"), -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test_catalog_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CatalogServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
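-    # With "never", the transport should be handed the plain endpoint built
-    # from _DEFAULT_ENDPOINT_TEMPLATE rather than the mTLS endpoint.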
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "true"), - (CatalogServiceAsyncClient, 
transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "false"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", "true"), - (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_catalog_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
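-    # With no cert source available anywhere, mTLS cannot be configured, so
-    # the plain endpoint and a None cert source are expected for either value
-    # of GOOGLE_API_USE_CLIENT_CERTIFICATE.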
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - CatalogServiceClient, CatalogServiceAsyncClient -]) -@mock.patch.object(CatalogServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CatalogServiceAsyncClient)) -def test_catalog_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - CatalogServiceClient, CatalogServiceAsyncClient -]) -@mock.patch.object(CatalogServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceClient)) -@mock.patch.object(CatalogServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CatalogServiceAsyncClient)) -def test_catalog_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - default_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CatalogServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest"), -]) -def test_catalog_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CatalogServiceClient, transports.CatalogServiceRestTransport, "rest", None), -]) -def test_catalog_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
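-    # The path is not opened here; it should be forwarded verbatim to the
-    # transport via the credentials_file kwarg, as asserted below.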
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_catalog_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CatalogServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", grpc_helpers), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_catalog_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
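-    # create_channel should receive the credentials loaded from the file
-    # rather than the ADC credentials, proving the file took precedence.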
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryTypeRequest, - dict, -]) -def test_create_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryTypeRequest( - parent='parent_value', - entry_type_id='entry_type_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
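-        # Invoke the RPC; the explicitly populated string fields should come
-        # back unchanged, since only UUID4 fields are auto-populated.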
- client.create_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryTypeRequest( - parent='parent_value', - entry_type_id='entry_type_id_value', - ) - -def test_create_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc - request = {} - client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry_type] = mock_rpc - - request = {} - await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
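-    # The async transport yields awaitable calls, so the Operation is wrapped
-    # in a FakeUnaryUnaryCall before being returned.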
- with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_entry_type_async_from_dict(): - await test_create_entry_type_async(request_type=dict) - -def test_create_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry_type( - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].entry_type_id - mock_val = 'entry_type_id_value' - assert arg == mock_val - - -def test_create_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_type( - catalog.CreateEntryTypeRequest(), - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - -@pytest.mark.asyncio -async def test_create_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry_type( - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].entry_type_id - mock_val = 'entry_type_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_entry_type( - catalog.CreateEntryTypeRequest(), - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryTypeRequest, - dict, -]) -def test_update_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateEntryTypeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateEntryTypeRequest( - ) - -def test_update_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc - request = {} - client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_entry_type] = mock_rpc - - request = {} - await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_entry_type_async_from_dict(): - await test_update_entry_type_async(request_type=dict) - -def test_update_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryTypeRequest() - - request.entry_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_type.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryTypeRequest() - - request.entry_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_type.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_type( - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_entry_type( - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry_type - mock_val = catalog.EntryType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryTypeRequest, - dict, -]) -def test_delete_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.DeleteEntryTypeRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
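-        # name and etag are plain strings rather than UUID4 fields, so they
-        # should round-trip exactly as set on the request above.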
- client.delete_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteEntryTypeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc - request = {} - client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entry_type] = mock_rpc - - request = {} - await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
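# Editorial sketch (Python, illustrative only): the cached-wrapped-RPC tests
# above hinge on `_wrapped_methods`, a dict built once at client construction
# that maps each raw transport method to its wrapped form. Hypothetical names;
# the real wrapper comes from google.api_core.gapic_v1.method.wrap_method and
# also layers retry, timeout, and metadata handling.
class _FakeTransport:
    def delete_entry_type(self, request):
        return "raw-response"

def _wrap(fn):
    def wrapped(request, **kwargs):
        return fn(request, **kwargs)  # real wrappers add retry/timeout here
    return wrapped

_transport = _FakeTransport()
# Bound methods compare and hash by (instance, function), so a fresh
# `_transport.delete_entry_type` lookup hits the entry cached here.
_wrapped_methods = {_transport.delete_entry_type: _wrap(_transport.delete_entry_type)}
assert _wrapped_methods[_transport.delete_entry_type]({}) == "raw-response"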
- with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_entry_type_async_from_dict(): - await test_delete_entry_type_async(request_type=dict) - -def test_delete_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListEntryTypesRequest, - dict, -]) -def test_list_entry_types(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.ListEntryTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryTypesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_entry_types_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListEntryTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_entry_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListEntryTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_entry_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc - request = {} - client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entry_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_entry_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_entry_types] = mock_rpc - - request = {} - await client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_entry_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entry_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryTypesRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - response = await client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.ListEntryTypesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryTypesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_entry_types_async_from_dict(): - await test_list_entry_types_async(request_type=dict) - -def test_list_entry_types_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.ListEntryTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value = catalog.ListEntryTypesResponse() - client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entry_types_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.ListEntryTypesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse()) - await client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entry_types_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.ListEntryTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entry_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entry_types_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_types( - catalog.ListEntryTypesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entry_types_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entry_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entry_types_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entry_types( - catalog.ListEntryTypesRequest(), - parent='parent_value', - ) - - -def test_list_entry_types_pager(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entry_types(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryType) - for i in results) -def test_list_entry_types_pages(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entry_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entry_types_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entry_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.EntryType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entry_types_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
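# Illustrative sketch (hypothetical page shape, not the real
# ListEntryTypesResponse): the pager tests above drive pagination purely via
# `side_effect` — each stub call yields the next canned page, and the trailing
# RuntimeError fails the test loudly if the pager fetches past the last page.
from unittest import mock

_pages = [
    {"items": [1, 2, 3], "next_page_token": "abc"},
    {"items": [4], "next_page_token": ""},
]
_stub = mock.Mock(side_effect=_pages + [RuntimeError])
_collected = []
while True:
    _page = _stub()
    _collected.extend(_page["items"])
    if not _page["next_page_token"]:
        break
assert _collected == [1, 2, 3, 4]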
- with mock.patch.object( - type(client.transport.list_entry_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_types(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryTypeRequest, - dict, -]) -def test_get_entry_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - ) - response = client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.type_aliases == ['type_aliases_value'] - assert response.platform == 'platform_value' - assert response.system == 'system_value' - - -def test_get_entry_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
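# Illustrative sketch (stand-in stub class, not a real gRPC multicallable):
# these tests patch '__call__' on type(client.transport.<method>) because the
# ()-operator is resolved on the *type* for special methods, so patching the
# class attribute is what actually intercepts the stub invocation.
from unittest import mock

class _Stub:
    def __call__(self, request):
        raise AssertionError("unit tests must never reach the network")

_stub = _Stub()
with mock.patch.object(type(_stub), '__call__') as _call:
    _call.return_value = "canned-response"
    assert _stub(request={}) == "canned-response"
    _call.assert_called_once_with(request={})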
- with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_entry_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryTypeRequest( - name='name_value', - ) - -def test_get_entry_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc - request = {} - client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_entry_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_entry_type] = mock_rpc - - request = {} - await client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
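# Illustrative sketch of why the async variant below wraps its canned response:
# the async transport awaits the stub's result, so a bare value would raise
# "object is not awaitable". This minimal awaitable only mimics that one
# behaviour of grpc_helpers_async.FakeUnaryUnaryCall (an assumption; the real
# helper also models grpc.aio call semantics).
import asyncio

class _FakeUnaryUnaryCall:
    def __init__(self, response):
        self._response = response
    def __await__(self):
        if False:
            yield  # generator form; nothing is actually awaited
        return self._response

async def _demo():
    return await _FakeUnaryUnaryCall("canned-response")

assert asyncio.run(_demo()) == "canned-response"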
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - )) - response = await client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.type_aliases == ['type_aliases_value'] - assert response.platform == 'platform_value' - assert response.system == 'system_value' - - -@pytest.mark.asyncio -async def test_get_entry_type_async_from_dict(): - await test_get_entry_type_async(request_type=dict) - -def test_get_entry_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.GetEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value = catalog.EntryType() - client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.GetEntryTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType()) - await client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryType() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method.
- client.get_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_entry_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_type( - catalog.GetEntryTypeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_entry_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry_type( - catalog.GetEntryTypeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateAspectTypeRequest, - dict, -]) -def test_create_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.CreateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateAspectTypeRequest( - parent='parent_value', - aspect_type_id='aspect_type_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateAspectTypeRequest( - parent='parent_value', - aspect_type_id='aspect_type_id_value', - ) - -def test_create_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc - request = {} - client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_aspect_type] = mock_rpc - - request = {} - await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_aspect_type_async_from_dict(): - await test_create_aspect_type_async(request_type=dict) - -def test_create_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateAspectTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateAspectTypeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_aspect_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_aspect_type( - parent='parent_value', - aspect_type=catalog.AspectType(name='name_value'), - aspect_type_id='aspect_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].aspect_type - mock_val = catalog.AspectType(name='name_value') - assert arg == mock_val - arg = args[0].aspect_type_id - mock_val = 'aspect_type_id_value' - assert arg == mock_val - - -def test_create_aspect_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_aspect_type( - catalog.CreateAspectTypeRequest(), - parent='parent_value', - aspect_type=catalog.AspectType(name='name_value'), - aspect_type_id='aspect_type_id_value', - ) - -@pytest.mark.asyncio -async def test_create_aspect_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_aspect_type( - parent='parent_value', - aspect_type=catalog.AspectType(name='name_value'), - aspect_type_id='aspect_type_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].aspect_type - mock_val = catalog.AspectType(name='name_value') - assert arg == mock_val - arg = args[0].aspect_type_id - mock_val = 'aspect_type_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_aspect_type_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.create_aspect_type( - catalog.CreateAspectTypeRequest(), - parent='parent_value', - aspect_type=catalog.AspectType(name='name_value'), - aspect_type_id='aspect_type_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateAspectTypeRequest, - dict, -]) -def test_update_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateAspectTypeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateAspectTypeRequest( - ) - -def test_update_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc - request = {} - client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_aspect_type] = mock_rpc - - request = {} - await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_aspect_type_async_from_dict(): - await test_update_aspect_type_async(request_type=dict) - -def test_update_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateAspectTypeRequest() - - request.aspect_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
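# Illustrative sketch (made-up metadata values): the field-header tests below
# only need a tuple-membership check, because call metadata is a sequence of
# (key, value) pairs and explicit routing rides in 'x-goog-request-params'.
_metadata = (
    ('x-goog-api-client', 'gl-python/fake'),
    ('x-goog-request-params', 'aspect_type.name=name_value'),
)
assert ('x-goog-request-params', 'aspect_type.name=name_value') in _metadata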
- with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'aspect_type.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateAspectTypeRequest() - - request.aspect_type.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'aspect_type.name=name_value', - ) in kw['metadata'] - - -def test_update_aspect_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_aspect_type( - aspect_type=catalog.AspectType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].aspect_type - mock_val = catalog.AspectType(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_aspect_type_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_aspect_type( - catalog.UpdateAspectTypeRequest(), - aspect_type=catalog.AspectType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_aspect_type_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_aspect_type(
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].aspect_type
-        mock_val = catalog.AspectType(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_aspect_type(
-            catalog.UpdateAspectTypeRequest(),
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.DeleteAspectTypeRequest,
-  dict,
-])
-def test_delete_aspect_type(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteAspectTypeRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_delete_aspect_type_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.DeleteAspectTypeRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteAspectTypeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc - request = {} - client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_aspect_type] = mock_rpc - - request = {} - await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
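-    # The async transport awaits the stub's __call__ result, so the test
-    # wraps the canned Operation in grpc_helpers_async.FakeUnaryUnaryCall
-    # to stand in for the awaitable gRPC call object.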
- with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_aspect_type_async_from_dict(): - await test_delete_aspect_type_async(request_type=dict) - -def test_delete_aspect_type_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteAspectTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_aspect_type_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteAspectTypeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_aspect_type_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_aspect_type( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_aspect_type_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_aspect_type(
-            catalog.DeleteAspectTypeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_aspect_type_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_aspect_type(
-            catalog.DeleteAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.ListAspectTypesRequest,
-  dict,
-])
-def test_list_aspect_types(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListAspectTypesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListAspectTypesRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAspectTypesPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_aspect_types_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListAspectTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_aspect_types(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListAspectTypesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_aspect_types_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_aspect_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc - request = {} - client.list_aspect_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_aspect_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_aspect_types_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_aspect_types in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_aspect_types] = mock_rpc - - request = {} - await client.list_aspect_types(request) - - # Establish that the underlying gRPC stub method was called. 
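-        # (the cached entry in _wrapped_methods was swapped for mock_rpc
-        # above, so a count of one shows the cache was consulted rather
-        # than a freshly wrapped method).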
-        assert mock_rpc.call_count == 1
-
-        await client.list_aspect_types(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async(transport: str = 'grpc_asyncio', request_type=catalog.ListAspectTypesRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListAspectTypesRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAspectTypesAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_async_from_dict():
-    await test_list_aspect_types_async(request_type=dict)
-
-def test_list_aspect_types_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListAspectTypesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        call.return_value = catalog.ListAspectTypesResponse()
-        client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListAspectTypesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
-        await client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_aspect_types_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListAspectTypesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_aspect_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_aspect_types_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_aspect_types(
-            catalog.ListAspectTypesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_aspect_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_aspect_types_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_aspect_types(
-            catalog.ListAspectTypesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_aspect_types_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_aspect_types),
-            '__call__') as call:
-        # Set the response to a series of pages.
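-        # Each element of side_effect is consumed by one successive stub
-        # call; the trailing RuntimeError ensures the pager cannot issue
-        # more requests than there are prepared pages.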
- call.side_effect = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token='abc', - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token='def', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token='ghi', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_aspect_types(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.AspectType) - for i in results) -def test_list_aspect_types_pages(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token='abc', - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token='def', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token='ghi', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - RuntimeError, - ) - pages = list(client.list_aspect_types(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_aspect_types_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token='abc', - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token='def', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token='ghi', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_aspect_types(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.AspectType) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_aspect_types_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
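-    # Patching __call__ with new_callable=mock.AsyncMock makes each page
-    # fetch awaitable, which the async pager requires.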
- with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token='abc', - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token='def', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token='ghi', - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_aspect_types(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetAspectTypeRequest, - dict, -]) -def test_get_aspect_type(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.AspectType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - response = client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetAspectTypeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.AspectType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -def test_get_aspect_type_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetAspectTypeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
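-        # The request echoed back below must equal the one constructed
-        # above, showing the populated string fields were sent unchanged.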
- client.get_aspect_type(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetAspectTypeRequest( - name='name_value', - ) - -def test_get_aspect_type_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_aspect_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc - request = {} - client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_aspect_type_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_aspect_type in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_aspect_type] = mock_rpc - - request = {} - await client.get_aspect_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_aspect_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_aspect_type_async(transport: str = 'grpc_asyncio', request_type=catalog.GetAspectTypeRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            etag='etag_value',
-            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
-        ))
-        response = await client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetAspectTypeRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, catalog.AspectType)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.display_name == 'display_name_value'
-        assert response.etag == 'etag_value'
-        assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
-
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_async_from_dict():
-    await test_get_aspect_type_async(request_type=dict)
-
-def test_get_aspect_type_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetAspectTypeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        call.return_value = catalog.AspectType()
-        client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetAspectTypeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
-        await client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_aspect_type_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.AspectType()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_aspect_type_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_aspect_type(
-            catalog.GetAspectTypeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_aspect_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_aspect_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_aspect_type_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_aspect_type(
-            catalog.GetAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.CreateEntryGroupRequest,
-  dict,
-])
-def test_create_entry_group(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateEntryGroupRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_create_entry_group_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryGroupRequest( - parent='parent_value', - entry_group_id='entry_group_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryGroupRequest( - parent='parent_value', - entry_group_id='entry_group_id_value', - ) - -def test_create_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc - request = {} - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry_group] = mock_rpc - - request = {} - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_entry_group_async_from_dict(): - await test_create_entry_group_async(request_type=dict) - -def test_create_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_entry_group_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_entry_group(
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_group_id
-        mock_val = 'entry_group_id_value'
-        assert arg == mock_val
-
-
-def test_create_entry_group_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_entry_group(
-            catalog.CreateEntryGroupRequest(),
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_entry_group_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_entry_group(
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_group_id
-        mock_val = 'entry_group_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_entry_group( - catalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group=catalog.EntryGroup(name='name_value'), - entry_group_id='entry_group_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryGroupRequest, - dict, -]) -def test_update_entry_group(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_entry_group_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.UpdateEntryGroupRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.UpdateEntryGroupRequest( - ) - -def test_update_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc - request = {} - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_entry_group] = mock_rpc - - request = {} - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.UpdateEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_entry_group_async_from_dict(): - await test_update_entry_group_async(request_type=dict) - -def test_update_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_group_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_group( - entry_group=catalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = catalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_group_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_group( - catalog.UpdateEntryGroupRequest(), - entry_group=catalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_entry_group(
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].entry_group
-        mock_val = catalog.EntryGroup(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_entry_group(
-            catalog.UpdateEntryGroupRequest(),
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.DeleteEntryGroupRequest,
-  dict,
-])
-def test_delete_entry_group(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteEntryGroupRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_entry_group_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.DeleteEntryGroupRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteEntryGroupRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc - request = {} - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entry_group] = mock_rpc - - request = {} - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
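-    # (For the async client the mocked stub must return an awaitable;
-    # grpc_helpers_async.FakeUnaryUnaryCall below wraps the response so
-    # that awaiting the call resolves to it.)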
- with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_entry_group_async_from_dict(): - await test_delete_entry_group_async(request_type=dict) - -def test_delete_entry_group_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_group_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_group_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
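-        # (The flattened keyword arguments are coalesced into a single
-        # DeleteEntryGroupRequest, which the assertions below unpack
-        # from args[0].)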
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_entry_group_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_entry_group(
-            catalog.DeleteEntryGroupRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_entry_group_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_entry_group(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_entry_group(
-            catalog.DeleteEntryGroupRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.ListEntryGroupsRequest,
-  dict,
-])
-def test_list_entry_groups(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntryGroupsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntryGroupsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntryGroupsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_entry_groups_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
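-    # (ListEntryGroupsRequest carries no auto-populated request_id-style
-    # field, so the request below is expected to pass through unchanged.)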
- client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.ListEntryGroupsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_entry_groups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.ListEntryGroupsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_entry_groups_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_groups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc - request = {} - client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_groups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_entry_groups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_entry_groups in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_entry_groups] = mock_rpc - - request = {} - await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_entry_groups(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntryGroupsRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntryGroupsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_async_from_dict():
-    await test_list_entry_groups_async(request_type=dict)
-
-def test_list_entry_groups_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntryGroupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        call.return_value = catalog.ListEntryGroupsResponse()
-        client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntryGroupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
-        await client.list_entry_groups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
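-    # (Request routing is driven by the x-goog-request-params metadata
-    # entry; the tuple below is looked up in the kwargs recorded for the
-    # mocked stub call.)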
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_entry_groups_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntryGroupsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_entry_groups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_entry_groups_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_entry_groups(
-            catalog.ListEntryGroupsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_entry_groups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entry_groups_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_entry_groups(
-            catalog.ListEntryGroupsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_entry_groups_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entry_groups),
-            '__call__') as call:
-        # Set the response to a series of pages.
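-        # (Each element of side_effect feeds one underlying RPC; the pager
-        # keeps fetching while next_page_token is non-empty, so the trailing
-        # RuntimeError sentinel should never be reached.)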
- call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entry_groups(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryGroup) - for i in results) -def test_list_entry_groups_pages(transport_name: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entry_groups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pager(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entry_groups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, catalog.EntryGroup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pages(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
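-    # (new_callable=mock.AsyncMock is required here: the async pager awaits
-    # the transport call directly, so a plain Mock would not be awaitable.)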
- with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_groups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryGroupRequest, - dict, -]) -def test_get_entry_group(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryGroup( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - response = client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryGroupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryGroup) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -def test_get_entry_group_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryGroupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_entry_group(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryGroupRequest( - name='name_value', - ) - -def test_get_entry_group_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc - request = {} - client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_entry_group in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_entry_group] = mock_rpc - - request = {} - await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryGroupRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            etag='etag_value',
-            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
-        ))
-        response = await client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetEntryGroupRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.EntryGroup)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.etag == 'etag_value'
-    assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED
-
-
-@pytest.mark.asyncio
-async def test_get_entry_group_async_from_dict():
-    await test_get_entry_group_async(request_type=dict)
-
-def test_get_entry_group_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryGroupRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        call.return_value = catalog.EntryGroup()
-        client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entry_group_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryGroupRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
-        await client.get_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_entry_group_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.EntryGroup()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_entry_group(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_entry_group_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entry_group(
-            catalog.GetEntryGroupRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_entry_group_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry_group),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_entry_group(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entry_group_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_entry_group(
-            catalog.GetEntryGroupRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.CreateEntryRequest,
-  dict,
-])
-def test_create_entry(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        )
-        response = client.create_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
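-    # (proto-plus messages expose their fields as attributes, so each field
-    # set on the mocked response can be compared directly below.)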
- assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -def test_create_entry_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryRequest( - parent='parent_value', - entry_id='entry_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryRequest( - parent='parent_value', - entry_id='entry_id_value', - ) - -def test_create_entry_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc - request = {} - client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry] = mock_rpc - - request = {} - await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.create_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.create_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_create_entry_async_from_dict():
-    await test_create_entry_async(request_type=dict)
-
-def test_create_entry_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CreateEntryRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        call.return_value = catalog.Entry()
-        client.create_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_entry_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CreateEntryRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        await client.create_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_entry_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_entry(
-            parent='parent_value',
-            entry=catalog.Entry(name='name_value'),
-            entry_id='entry_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry
-        mock_val = catalog.Entry(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_id
-        mock_val = 'entry_id_value'
-        assert arg == mock_val
-
-
-def test_create_entry_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_entry(
-            catalog.CreateEntryRequest(),
-            parent='parent_value',
-            entry=catalog.Entry(name='name_value'),
-            entry_id='entry_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_entry_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_entry(
-            parent='parent_value',
-            entry=catalog.Entry(name='name_value'),
-            entry_id='entry_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].entry
-        mock_val = catalog.Entry(name='name_value')
-        assert arg == mock_val
-        arg = args[0].entry_id
-        mock_val = 'entry_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_entry_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.UpdateEntryRequest,
-  dict,
-])
-def test_update_entry(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        )
-        response = client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.UpdateEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-def test_update_entry_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.UpdateEntryRequest(
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_entry(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.UpdateEntryRequest(
-        )
-
-def test_update_entry_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_entry in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc
-        request = {}
-        client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_entry in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_entry] = mock_rpc
-
-        request = {}
-        await client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
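The two caching tests above pin down one behavior: wrapping an RPC with its default retry/timeout/metadata happens once, in `_prep_wrapped_messages` at client construction, and every later call looks the wrapper up in `_wrapped_methods`. A rough sketch of that pattern over a simplified transport (illustrative, not the generated class):

    from google.api_core import gapic_v1

    class SimplifiedTransport:
        # Sketch only: wrap each stub method exactly once, up front.
        def __init__(self, stub_methods):
            self._wrapped_methods = {
                method: gapic_v1.method.wrap_method(method)
                for method in stub_methods
            }

        def invoke(self, method, request):
            # Reuse the cached wrapper; wrap_method is never called again,
            # which is what `wrapper_fn.call_count == 0` asserts above.
            return self._wrapped_methods[method](request)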
-
-@pytest.mark.asyncio
-async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.UpdateEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.UpdateEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_update_entry_async_from_dict():
-    await test_update_entry_async(request_type=dict)
-
-def test_update_entry_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.UpdateEntryRequest()
-
-    request.entry.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        call.return_value = catalog.Entry()
-        client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'entry.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_entry_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.UpdateEntryRequest()
-
-    request.entry.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        await client.update_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'entry.name=name_value',
-    ) in kw['metadata']
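The field-header tests assert that any request field appearing in the HTTP/1.1 URI is mirrored into the `x-goog-request-params` gRPC metadata entry. The helper the generated clients use for this is public in `google.api_core`, so the expected tuple can be reproduced directly:

    from google.api_core.gapic_v1 import routing_header

    # Build the metadata entry the tests look for in kw['metadata'].
    metadata = routing_header.to_grpc_metadata((('entry.name', 'name_value'),))
    assert metadata == ('x-goog-request-params', 'entry.name=name_value')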
-
-
-def test_update_entry_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_entry(
-            entry=catalog.Entry(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].entry
-        mock_val = catalog.Entry(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_entry_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_entry(
-            catalog.UpdateEntryRequest(),
-            entry=catalog.Entry(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_entry_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_entry(
-            entry=catalog.Entry(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].entry
-        mock_val = catalog.Entry(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_entry_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_entry(
-            catalog.UpdateEntryRequest(),
-            entry=catalog.Entry(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.DeleteEntryRequest,
-  dict,
-])
-def test_delete_entry(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        )
-        response = client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-def test_delete_entry_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.DeleteEntryRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_entry(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.DeleteEntryRequest(
-            name='name_value',
-        )
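The `non_empty_request_with_auto_populated_field` tests are a coverage failsafe for AIP-4235: request string fields annotated as auto-populated UUID4 are filled in on the client when left unset, while explicitly set fields pass through unchanged. A hedged sketch of that behavior (`request_id` is a hypothetical field name; the requests in this diff do not declare one):

    import uuid

    def autopopulate(request: dict, field: str = 'request_id') -> dict:
        # Sketch of AIP-4235 client-side auto-population; the field name is
        # illustrative, not taken from the Dataplex protos.
        if not request.get(field):
            request[field] = str(uuid.uuid4())
        return request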
-
-def test_delete_entry_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_entry in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc
-        request = {}
-        client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_entry in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_entry] = mock_rpc
-
-        request = {}
-        await client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.DeleteEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_entry_async_from_dict():
-    await test_delete_entry_async(request_type=dict)
-
-def test_delete_entry_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.DeleteEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        call.return_value = catalog.Entry()
-        client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_entry_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.DeleteEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        await client.delete_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_entry_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_entry(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_entry_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_entry(
-            catalog.DeleteEntryRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_entry_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_entry(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entry_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_entry(
-            catalog.DeleteEntryRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.ListEntriesRequest,
-  dict,
-])
-def test_list_entries(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntriesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntriesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntriesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_entries_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.ListEntriesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_entries(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.ListEntriesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-        )
-
-def test_list_entries_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_entries in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc
-        request = {}
-        client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_entries(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_entries in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_entries] = mock_rpc
-
-        request = {}
-        await client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_entries(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.ListEntriesRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListEntriesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntriesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_entries_async_from_dict():
-    await test_list_entries_async(request_type=dict)
-
-def test_list_entries_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntriesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        call.return_value = catalog.ListEntriesResponse()
-        client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entries_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListEntriesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse())
-        await client.list_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_entries_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListEntriesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_entries(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_entries_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_entries(
-            catalog.ListEntriesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_entries_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_entries(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entries_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_entries(
-            catalog.ListEntriesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_entries_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[],
-                next_page_token='def',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_entries(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.Entry)
-                   for i in results)
-
-
-def test_list_entries_pages(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[],
-                next_page_token='def',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_entries(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_entries_async_pager():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[],
-                next_page_token='def',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_entries(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, catalog.Entry)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_entries_async_pages():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entries),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[],
-                next_page_token='def',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListEntriesResponse(
-                entries=[
-                    catalog.Entry(),
-                    catalog.Entry(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_entries(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
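The pager tests above feed four canned pages through `side_effect` and expect six entries in total, with iteration stopping at the page whose `next_page_token` is empty. A condensed sketch of the loop a `ListEntriesPager`-style iterator performs (simplified; the real pager also threads retry, timeout, and routing metadata through every page fetch):

    def iterate_entries(api_call, request):
        # One RPC per page: re-issue the request with the token from the
        # previous response until the token comes back empty.
        while True:
            response = api_call(request)
            yield from response.entries
            if not response.next_page_token:
                break
            request = dict(request, page_token=response.next_page_token)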
-
-@pytest.mark.parametrize("request_type", [
-  catalog.GetEntryRequest,
-  dict,
-])
-def test_get_entry(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        )
-        response = client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-def test_get_entry_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.GetEntryRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_entry(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.GetEntryRequest(
-            name='name_value',
-        )
-
-def test_get_entry_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_entry in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc
-        request = {}
-        client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_entry in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_entry] = mock_rpc
-
-        request = {}
-        await client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_entry_async_from_dict():
-    await test_get_entry_async(request_type=dict)
-
-def test_get_entry_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        call.return_value = catalog.Entry()
-        client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entry_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetEntryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        await client.get_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_entry_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_entry(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_entry_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entry(
-            catalog.GetEntryRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_entry(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_entry(
-            catalog.GetEntryRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.LookupEntryRequest,
-  dict,
-])
-def test_lookup_entry(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.lookup_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        )
-        response = client.lookup_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.LookupEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-def test_lookup_entry_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.LookupEntryRequest(
-        name='name_value',
-        entry='entry_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.lookup_entry),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.lookup_entry(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.LookupEntryRequest(
-            name='name_value',
-            entry='entry_value',
-        )
-
-def test_lookup_entry_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.lookup_entry in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc
-        request = {}
-        client.lookup_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.lookup_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.lookup_entry in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.lookup_entry] = mock_rpc
-
-        request = {}
-        await client.lookup_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.lookup_entry(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=catalog.LookupEntryRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.lookup_entry),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry(
-            name='name_value',
-            entry_type='entry_type_value',
-            parent_entry='parent_entry_value',
-            fully_qualified_name='fully_qualified_name_value',
-        ))
-        response = await client.lookup_entry(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.LookupEntryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.Entry)
-    assert response.name == 'name_value'
-    assert response.entry_type == 'entry_type_value'
-    assert response.parent_entry == 'parent_entry_value'
-    assert response.fully_qualified_name == 'fully_qualified_name_value'
-
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async_from_dict():
-    await test_lookup_entry_async(request_type=dict)
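Throughout the async variants above, canned responses are wrapped in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the stub call instead of using its return value directly. A minimal awaitable with the same shape, assuming only the await behavior matters to these tests:

    import asyncio

    class FakeCall:
        # Minimal stand-in for an awaitable unary-unary call (sketch only).
        def __init__(self, response):
            self._response = response

        def __await__(self):
            if False:
                yield  # makes this a generator-based awaitable
            return self._response

    async def demo():
        assert await FakeCall('canned-entry') == 'canned-entry'

    asyncio.run(demo())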
- with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_lookup_entry_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.LookupEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry()) - await client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - catalog.SearchEntriesRequest, - dict, -]) -def test_search_entries(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.SearchEntriesResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.SearchEntriesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchEntriesPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_search_entries_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.SearchEntriesRequest( - name='name_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - scope='scope_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_entries(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.SearchEntriesRequest( - name='name_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - scope='scope_value', - ) - -def test_search_entries_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_entries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc - request = {} - client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_entries in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_entries] = mock_rpc - - request = {} - await client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.search_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_entries_async(transport: str = 'grpc_asyncio', request_type=catalog.SearchEntriesRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse(
-            total_size=1086,
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.search_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.SearchEntriesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.SearchEntriesAsyncPager)
-    assert response.total_size == 1086
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_search_entries_async_from_dict():
-    await test_search_entries_async(request_type=dict)
-
-def test_search_entries_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.SearchEntriesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        call.return_value = catalog.SearchEntriesResponse()
-        client.search_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_search_entries_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.SearchEntriesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
-        await client.search_entries(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_search_entries_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.SearchEntriesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.search_entries(
-            name='name_value',
-            query='query_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].query
-        mock_val = 'query_value'
-        assert arg == mock_val
-
-
-def test_search_entries_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.search_entries(
-            catalog.SearchEntriesRequest(),
-            name='name_value',
-            query='query_value',
-        )
-
-@pytest.mark.asyncio
-async def test_search_entries_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.search_entries(
-            name='name_value',
-            query='query_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].query
-        mock_val = 'query_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_search_entries_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.search_entries(
-            catalog.SearchEntriesRequest(),
-            name='name_value',
-            query='query_value',
-        )
-
-
-def test_search_entries_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('name', ''),
-            )),
-        )
-        pager = client.search_entries(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.SearchEntriesResult)
-                   for i in results)
-
-
-def test_search_entries_pages(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.search_entries(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_search_entries_async_pager():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.search_entries(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, catalog.SearchEntriesResult)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_search_entries_async_pages():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.search_entries),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[],
-                next_page_token='def',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.SearchEntriesResponse(
-                results=[
-                    catalog.SearchEntriesResult(),
-                    catalog.SearchEntriesResult(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.search_entries(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  catalog.CreateMetadataJobRequest,
-  dict,
-])
-def test_create_metadata_job(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_metadata_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.CreateMetadataJobRequest(
-        parent='parent_value',
-        metadata_job_id='metadata_job_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client.create_metadata_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.CreateMetadataJobRequest(
-            parent='parent_value',
-            metadata_job_id='metadata_job_id_value',
-        )
-
-def test_create_metadata_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_metadata_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc
-        request = {}
-        client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_metadata_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_metadata_job] = mock_rpc
-
-        request = {}
-        await client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.create_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateMetadataJobRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_async_from_dict():
-    await test_create_metadata_job_async(request_type=dict)
-
-def test_create_metadata_job_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CreateMetadataJobRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CreateMetadataJobRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.create_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_metadata_job_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_metadata_job(
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].metadata_job
-        mock_val = catalog.MetadataJob(name='name_value')
-        assert arg == mock_val
-        arg = args[0].metadata_job_id
-        mock_val = 'metadata_job_id_value'
-        assert arg == mock_val
-
-
-def test_create_metadata_job_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_metadata_job(
-            catalog.CreateMetadataJobRequest(),
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_metadata_job(
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].metadata_job
-        mock_val = catalog.MetadataJob(name='name_value')
-        assert arg == mock_val
-        arg = args[0].metadata_job_id
-        mock_val = 'metadata_job_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_metadata_job_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_metadata_job(
-            catalog.CreateMetadataJobRequest(),
-            parent='parent_value',
-            metadata_job=catalog.MetadataJob(name='name_value'),
-            metadata_job_id='metadata_job_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.GetMetadataJobRequest,
-  dict,
-])
-def test_get_metadata_job(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.MetadataJob(
-            name='name_value',
-            uid='uid_value',
-            type_=catalog.MetadataJob.Type.IMPORT,
-        )
-        response = client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.MetadataJob)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.type_ == catalog.MetadataJob.Type.IMPORT
-
-
-def test_get_metadata_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.GetMetadataJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client.get_metadata_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.GetMetadataJobRequest(
-            name='name_value',
-        )
-
-def test_get_metadata_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_metadata_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc
-        request = {}
-        client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_metadata_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_metadata_job] = mock_rpc
-
-        request = {}
-        await client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.GetMetadataJobRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob(
-            name='name_value',
-            uid='uid_value',
-            type_=catalog.MetadataJob.Type.IMPORT,
-        ))
-        response = await client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.GetMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, catalog.MetadataJob)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.type_ == catalog.MetadataJob.Type.IMPORT
-
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_async_from_dict():
-    await test_get_metadata_job_async(request_type=dict)
-
-def test_get_metadata_job_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        call.return_value = catalog.MetadataJob()
-        client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.GetMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
-        await client.get_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_metadata_job_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.MetadataJob()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_metadata_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_metadata_job_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_metadata_job(
-            catalog.GetMetadataJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_metadata_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_metadata_job_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_metadata_job(
-            catalog.GetMetadataJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.ListMetadataJobsRequest,
-  dict,
-])
-def test_list_metadata_jobs(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListMetadataJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-        response = client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListMetadataJobsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListMetadataJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-def test_list_metadata_jobs_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.ListMetadataJobsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-        order_by='order_by_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client.list_metadata_jobs(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.ListMetadataJobsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-            order_by='order_by_value',
-        )
-
-def test_list_metadata_jobs_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_metadata_jobs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc
-        request = {}
-        client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_metadata_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_metadata_jobs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_metadata_jobs] = mock_rpc
-
-        request = {}
-        await client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_metadata_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async(transport: str = 'grpc_asyncio', request_type=catalog.ListMetadataJobsRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.ListMetadataJobsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListMetadataJobsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async_from_dict():
-    await test_list_metadata_jobs_async(request_type=dict)
-
-def test_list_metadata_jobs_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListMetadataJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        call.return_value = catalog.ListMetadataJobsResponse()
-        client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.ListMetadataJobsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
-        await client.list_metadata_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_metadata_jobs_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.ListMetadataJobsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_metadata_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_metadata_jobs_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_metadata_jobs(
-            catalog.ListMetadataJobsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_metadata_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_metadata_jobs(
-            catalog.ListMetadataJobsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_metadata_jobs_pager(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[],
-                next_page_token='def',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.MetadataJob)
-                   for i in results)
-
-
-def test_list_metadata_jobs_pages(transport_name: str = "grpc"):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[],
-                next_page_token='def',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_metadata_jobs(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async_pager():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[],
-                next_page_token='def',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_metadata_jobs(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, catalog.MetadataJob)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_metadata_jobs_async_pages():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_metadata_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[],
-                next_page_token='def',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListMetadataJobsResponse(
-                metadata_jobs=[
-                    catalog.MetadataJob(),
-                    catalog.MetadataJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_metadata_jobs(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  catalog.CancelMetadataJobRequest,
-  dict,
-])
-def test_cancel_metadata_job(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CancelMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_cancel_metadata_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = catalog.CancelMetadataJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client.cancel_metadata_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == catalog.CancelMetadataJobRequest(
-            name='name_value',
-        )
-
-def test_cancel_metadata_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.cancel_metadata_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc
-        request = {}
-        client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.cancel_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CatalogServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.cancel_metadata_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.cancel_metadata_job] = mock_rpc
-
-        request = {}
-        await client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.cancel_metadata_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_async(transport: str = 'grpc_asyncio', request_type=catalog.CancelMetadataJobRequest):
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CancelMetadataJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_async_from_dict():
-    await test_cancel_metadata_job_async(request_type=dict)
-
-def test_cancel_metadata_job_field_headers():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CancelMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        call.return_value = None
-        client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_field_headers_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = catalog.CancelMetadataJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.cancel_metadata_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_cancel_metadata_job_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.cancel_metadata_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_cancel_metadata_job_flattened_error():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.cancel_metadata_job(
-            catalog.CancelMetadataJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_flattened_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_metadata_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.cancel_metadata_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_cancel_metadata_job_flattened_error_async():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.cancel_metadata_job(
-            catalog.CancelMetadataJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  catalog.CreateEntryLinkRequest,
-  dict,
-])
-def test_create_entry_link(request_type, transport: str = 'grpc'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_entry_link),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = catalog.EntryLink(
-            name='name_value',
-            entry_link_type='entry_link_type_value',
-        )
-        response = client.create_entry_link(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = catalog.CreateEntryLinkRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -def test_create_entry_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.CreateEntryLinkRequest( - parent='parent_value', - entry_link_id='entry_link_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entry_link(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.CreateEntryLinkRequest( - parent='parent_value', - entry_link_id='entry_link_id_value', - ) - -def test_create_entry_link_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_link] = mock_rpc - request = {} - client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entry_link in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entry_link] = mock_rpc - - request = {} - await client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.create_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.CreateEntryLinkRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - response = await client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.CreateEntryLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.asyncio -async def test_create_entry_link_async_from_dict(): - await test_create_entry_link_async(request_type=dict) - -def test_create_entry_link_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryLinkRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_link_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.CreateEntryLinkRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - await client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_link_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry_link( - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_link - mock_val = catalog.EntryLink(name='name_value') - assert arg == mock_val - arg = args[0].entry_link_id - mock_val = 'entry_link_id_value' - assert arg == mock_val - - -def test_create_entry_link_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_link( - catalog.CreateEntryLinkRequest(), - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - -@pytest.mark.asyncio -async def test_create_entry_link_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry_link( - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_link - mock_val = catalog.EntryLink(name='name_value') - assert arg == mock_val - arg = args[0].entry_link_id - mock_val = 'entry_link_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_link_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
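
The three flattened fields checked above map one-to-one onto `CreateEntryLinkRequest`. Since `EntryLink` and its request messages are introduced by this very patch, the import below assumes the generated module layout that ships with it, and all values are illustrative:

    from google.cloud.dataplex_v1.types import catalog

    # Hypothetical values mirroring the flattened test: parent, entry_link,
    # and entry_link_id become the three request fields asserted above.
    link = catalog.EntryLink(
        name="name_value",
        entry_link_type="entry_link_type_value",
    )
    request = catalog.CreateEntryLinkRequest(
        parent="parent_value",
        entry_link=link,
        entry_link_id="entry_link_id_value",
    )
    assert request.entry_link.name == "name_value"
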
- with pytest.raises(ValueError): - await client.create_entry_link( - catalog.CreateEntryLinkRequest(), - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryLinkRequest, - dict, -]) -def test_delete_entry_link(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - ) - response = client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -def test_delete_entry_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.DeleteEntryLinkRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_entry_link(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.DeleteEntryLinkRequest( - name='name_value', - ) - -def test_delete_entry_link_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_link] = mock_rpc - request = {} - client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. 
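
The `non_empty_request_with_auto_populated_field` test above guards the AIP-4235 behavior: string fields annotated as auto-populated UUID4 values (typically a `request_id`) are filled in by the client only when the caller leaves them unset. A self-contained sketch of that rule:

    import uuid

    # Sketch of AIP-4235 auto-population: keep a caller-supplied value,
    # otherwise generate a fresh UUID4 string.
    def autopopulate(value: str) -> str:
        return value or str(uuid.uuid4())

    assert autopopulate("caller-supplied") == "caller-supplied"
    assert len(autopopulate("")) == 36  # canonical UUID4 string length
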
- assert mock_rpc.call_count == 1 - - client.delete_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_entry_link in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_entry_link] = mock_rpc - - request = {} - await client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.DeleteEntryLinkRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - response = await client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.DeleteEntryLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.asyncio -async def test_delete_entry_link_async_from_dict(): - await test_delete_entry_link_async(request_type=dict) - -def test_delete_entry_link_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryLinkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. 
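
The async variants above wrap every canned response in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the stub's result; assigning a bare value to the mock would fail at the `await`. A minimal stand-in (not the real `google.api_core` class) showing the shape such a fake needs:

    import asyncio

    class FakeCall:
        # Stand-in for grpc_helpers_async.FakeUnaryUnaryCall: an awaitable
        # that resolves to a canned response.
        def __init__(self, response=None):
            self._response = response

        def __await__(self):
            if False:  # never runs; makes __await__ a generator
                yield
            return self._response

    async def main():
        assert await FakeCall("canned") == "canned"

    asyncio.run(main())
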
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_link_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.DeleteEntryLinkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - await client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_link_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_link( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_link_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry_link( - catalog.DeleteEntryLinkRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_link_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry_link( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_link_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry_link( - catalog.DeleteEntryLinkRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryLinkRequest, - dict, -]) -def test_get_entry_link(request_type, transport: str = 'grpc'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - ) - response = client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -def test_get_entry_link_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = catalog.GetEntryLinkRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_entry_link(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == catalog.GetEntryLinkRequest( - name='name_value', - ) - -def test_get_entry_link_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc - request = {} - client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_entry_link in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_entry_link] = mock_rpc - - request = {} - await client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_entry_link_async(transport: str = 'grpc_asyncio', request_type=catalog.GetEntryLinkRequest): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - response = await client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = catalog.GetEntryLinkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.asyncio -async def test_get_entry_link_async_from_dict(): - await test_get_entry_link_async(request_type=dict) - -def test_get_entry_link_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.GetEntryLinkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
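
Throughout these tests the stub is intercepted with `mock.patch.object(type(...), '__call__')` rather than by patching the attribute on the instance, because Python resolves special methods on the type. A compact sketch, with a hypothetical `_MultiCallable` standing in for the gRPC multicallable, of why the assertions can then read the request straight out of `args[0]`:

    from unittest import mock

    class _MultiCallable:
        # Stand-in for the grpc.UnaryUnaryMultiCallable on the transport.
        def __call__(self, request, **kwargs):
            raise RuntimeError("would hit the network")

    stub = _MultiCallable()

    # Patch __call__ on the *type*; invoking the instance now hits the mock,
    # which records only the request arguments, not the instance itself.
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = "canned"
        assert stub({"name": "n"}) == "canned"
        _, args, _ = call.mock_calls[0]
        assert args[0] == {"name": "n"}
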
- with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_link_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = catalog.GetEntryLinkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - await client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_link_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_entry_link( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_entry_link_flattened_error(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_link( - catalog.GetEntryLinkRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_entry_link_flattened_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = catalog.EntryLink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry_link( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_link_flattened_error_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry_link( - catalog.GetEntryLinkRequest(), - name='name_value', - ) - - -def test_create_entry_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_type] = mock_rpc - - request = {} - client.create_entry_type(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_entry_type_rest_required_fields(request_type=catalog.CreateEntryTypeRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["entry_type_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "entryTypeId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["entryTypeId"] = 'entry_type_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
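
The `*_use_cached_wrapped_rpc` tests, including the REST one begun above, all assert the same invariant: `wrap_method` runs once per RPC at client construction and the wrapper is cached in `_transport._wrapped_methods`, so later calls reuse it instead of re-wrapping. A sketch of the wrapping step with a stand-in transport callable (`transport_rpc` is hypothetical):

    from google.api_core.gapic_v1 import method

    sent = []

    def transport_rpc(request, **kwargs):
        # Stand-in transport callable; kwargs absorbs timeout/metadata.
        sent.append(request)
        return "ok"

    # _prep_wrapped_messages does essentially this once per RPC; the
    # resulting wrapper carries the default timeout on every call.
    wrapped = method.wrap_method(transport_rpc, default_timeout=60.0)
    assert wrapped({"page_size": 5}) == "ok"
    assert wrapped({"page_size": 5}) == "ok"
    assert sent == [{"page_size": 5}, {"page_size": 5}]
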
- assert not set(unset_fields) - set(("entry_type_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == 'entry_type_id_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_entry_type(request) - - expected_params = [ - ( - "entryTypeId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("entryTypeId", "validateOnly", )) & set(("parent", "entryTypeId", "entryType", ))) - - -def test_create_entry_type_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_entry_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1]) - - -def test_create_entry_type_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_type( - catalog.CreateEntryTypeRequest(), - parent='parent_value', - entry_type=catalog.EntryType(name='name_value'), - entry_type_id='entry_type_id_value', - ) - - -def test_update_entry_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry_type] = mock_rpc - - request = {} - client.update_entry_type(request) - - # Establish that the underlying gRPC stub method was called. 
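
The flattened REST tests validate the final URL against the method's http rule with `path_template.validate`. The companion `expand` helper shows the same substitution in the forward direction:

    from google.api_core import path_template

    # expand() substitutes the bound variable; validate() pattern-matches
    # a concrete URL back against the rule, as the assertions above do.
    tmpl = "v1/{parent=projects/*/locations/*}/entryTypes"
    url = path_template.expand(tmpl, parent="projects/sample1/locations/sample2")
    assert url == "v1/projects/sample1/locations/sample2/entryTypes"
    assert path_template.validate(tmpl, url)
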
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_entry_type_rest_required_fields(request_type=catalog.UpdateEntryTypeRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_entry_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("entryType", "updateMask", ))) - - -def test_update_entry_type_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
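
The `*_rest_required_fields` tests mock out `path_template.transcode`; in a real call it is what splits a request into URI, body, and query parameters according to the http rule. A dict-based sketch for the PATCH rule used by `update_entry_type`, assuming the dict form of `transcode` and illustrative field values:

    from google.api_core import path_template

    http_options = [{
        "method": "patch",
        "uri": "/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}",
        "body": "entry_type",
    }]
    request = {
        "entry_type": {"name": "projects/p1/locations/l1/entryTypes/e1"},
        "validate_only": True,
    }
    # The name field is consumed by the URI, entry_type becomes the body,
    # and whatever is left over lands in the query string.
    result = path_template.transcode(http_options, **request)
    assert result["method"] == "patch"
    assert result["uri"] == "/v1/projects/p1/locations/l1/entryTypes/e1"
    assert result["query_params"] == {"validate_only": True}
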
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_entry_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1]) - - -def test_update_entry_type_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_entry_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_type] = mock_rpc - - request = {} - client.delete_entry_type(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_type(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_entry_type_rest_required_fields(request_type=catalog.DeleteEntryTypeRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_entry_type(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_entry_type_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_entry_type(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1]) - - -def test_delete_entry_type_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), - name='name_value', - ) - - -def test_list_entry_types_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_types in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_types] = mock_rpc - - request = {} - client.list_entry_types(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_types(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_entry_types_rest_required_fields(request_type=catalog.ListEntryTypesRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_types._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_entry_types(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_entry_types_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_entry_types._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_entry_types_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_entry_types(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1]) - - -def test_list_entry_types_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_types( - catalog.ListEntryTypesRequest(), - parent='parent_value', - ) - - -def test_list_entry_types_rest_pager(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token='abc', - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token='def', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token='ghi', - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_entry_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryType) - for i in results) - - pages = list(client.list_entry_types(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_type_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_type in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc - - request = {} - client.get_entry_type(request) - - # Establish that the underlying gRPC stub method was called. 
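
The pager test above checks two iteration surfaces: iterating the pager itself flattens items across pages, while `.pages` yields raw responses whose `next_page_token` drives the next request until it comes back empty. A hypothetical minimal pager (not the generated dataplex class) making that contract concrete:

    class ListPager:
        def __init__(self, fetch, first_response):
            self._fetch = fetch  # callable(page_token) -> response dict
            self._first = first_response

        @property
        def pages(self):
            response = self._first
            while True:
                yield response
                if not response["next_page_token"]:
                    return
                response = self._fetch(response["next_page_token"])

        def __iter__(self):
            for page in self.pages:
                yield from page["entry_types"]

    pages = {
        "": {"entry_types": ["a", "b"], "next_page_token": "t1"},
        "t1": {"entry_types": ["c"], "next_page_token": ""},
    }
    pager = ListPager(lambda token: pages[token], pages[""])
    assert list(pager) == ["a", "b", "c"]
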
-        assert mock_rpc.call_count == 1
-
-        client.get_entry_type(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = catalog.EntryType()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = catalog.EntryType.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_entry_type(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_entry_type_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_entry_type._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_entry_type_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = catalog.EntryType()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = catalog.EntryType.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_entry_type(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1])
-
-
-def test_get_entry_type_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entry_type(
-            catalog.GetEntryTypeRequest(),
-            name='name_value',
-        )
-
-
-def test_create_aspect_type_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_aspect_type in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_aspect_type] = mock_rpc
-
-        request = {}
-        client.create_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_aspect_type(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_aspect_type_rest_required_fields(request_type=catalog.CreateAspectTypeRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["aspect_type_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "aspectTypeId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_aspect_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "aspectTypeId" in jsonified_request
-    assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["aspectTypeId"] = 'aspect_type_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_aspect_type._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("aspect_type_id", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "aspectTypeId" in jsonified_request
-    assert jsonified_request["aspectTypeId"] == 'aspect_type_id_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
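-            # This RPC POSTs a message body, so the transcode stub below also
-            # supplies the request as the 'body' of the transcoded call.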
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_aspect_type(request)
-
-            expected_params = [
-                (
-                    "aspectTypeId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_aspect_type_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.create_aspect_type._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("aspectTypeId", "validateOnly", )) & set(("parent", "aspectTypeId", "aspectType", )))
-
-
-def test_create_aspect_type_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            aspect_type=catalog.AspectType(name='name_value'),
-            aspect_type_id='aspect_type_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_aspect_type(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1])
-
-
-def test_create_aspect_type_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_aspect_type(
-            catalog.CreateAspectTypeRequest(),
-            parent='parent_value',
-            aspect_type=catalog.AspectType(name='name_value'),
-            aspect_type_id='aspect_type_id_value',
-        )
-
-
-def test_update_aspect_type_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_aspect_type in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_aspect_type] = mock_rpc
-
-        request = {}
-        client.update_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_aspect_type(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_aspect_type_rest_required_fields(request_type=catalog.UpdateAspectTypeRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_aspect_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_aspect_type._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_aspect_type(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_aspect_type_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.update_aspect_type._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("aspectType", "updateMask", )))
-
-
-def test_update_aspect_type_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_aspect_type(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1])
-
-
-def test_update_aspect_type_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_aspect_type(
-            catalog.UpdateAspectTypeRequest(),
-            aspect_type=catalog.AspectType(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_delete_aspect_type_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_aspect_type in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_aspect_type] = mock_rpc
-
-        request = {}
-        client.delete_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_aspect_type(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_aspect_type_rest_required_fields(request_type=catalog.DeleteAspectTypeRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_aspect_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_aspect_type._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("etag", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
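-            # DELETE carries no request body, so only the query params are
-            # populated from the request message here.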
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_aspect_type(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_aspect_type_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_aspect_type._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
-
-
-def test_delete_aspect_type_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_aspect_type(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1])
-
-
-def test_delete_aspect_type_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_aspect_type(
-            catalog.DeleteAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-def test_list_aspect_types_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_aspect_types in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_aspect_types] = mock_rpc
-
-        request = {}
-        client.list_aspect_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_aspect_types(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_aspect_types_rest_required_fields(request_type=catalog.ListAspectTypesRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_aspect_types._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_aspect_types._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = catalog.ListAspectTypesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = catalog.ListAspectTypesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_aspect_types(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_aspect_types_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_aspect_types._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_aspect_types_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = catalog.ListAspectTypesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = catalog.ListAspectTypesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_aspect_types(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1])
-
-
-def test_list_aspect_types_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_aspect_types(
-            catalog.ListAspectTypesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_aspect_types_rest_pager(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-                next_page_token='abc',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[],
-                next_page_token='def',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                ],
-                next_page_token='ghi',
-            ),
-            catalog.ListAspectTypesResponse(
-                aspect_types=[
-                    catalog.AspectType(),
-                    catalog.AspectType(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_aspect_types(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, catalog.AspectType)
-                for i in results)
-
-        pages = list(client.list_aspect_types(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_aspect_type_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_aspect_type in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc
-
-        request = {}
-        client.get_aspect_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_aspect_type(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_aspect_type_rest_required_fields(request_type=catalog.GetAspectTypeRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_aspect_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_aspect_type._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = catalog.AspectType()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = catalog.AspectType.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_aspect_type(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_aspect_type_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_aspect_type._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_aspect_type_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = catalog.AspectType()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = catalog.AspectType.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_aspect_type(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1])
-
-
-def test_get_aspect_type_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_aspect_type(
-            catalog.GetAspectTypeRequest(),
-            name='name_value',
-        )
-
-
-def test_create_entry_group_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_entry_group in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_entry_group] = mock_rpc
-
-        request = {}
-        client.create_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_entry_group(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_entry_group_rest_required_fields(request_type=catalog.CreateEntryGroupRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["entry_group_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "entryGroupId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_group._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "entryGroupId" in jsonified_request
-    assert jsonified_request["entryGroupId"] == request_init["entry_group_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["entryGroupId"] = 'entry_group_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_group._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("entry_group_id", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "entryGroupId" in jsonified_request
-    assert jsonified_request["entryGroupId"] == 'entry_group_id_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_entry_group(request)
-
-            expected_params = [
-                (
-                    "entryGroupId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_entry_group_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.create_entry_group._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("entryGroupId", "validateOnly", )) & set(("parent", "entryGroupId", "entryGroup", )))
-
-
-def test_create_entry_group_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_entry_group(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1])
-
-
-def test_create_entry_group_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_entry_group(
-            catalog.CreateEntryGroupRequest(),
-            parent='parent_value',
-            entry_group=catalog.EntryGroup(name='name_value'),
-            entry_group_id='entry_group_id_value',
-        )
-
-
-def test_update_entry_group_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_entry_group in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_entry_group] = mock_rpc
-
-        request = {}
-        client.update_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_entry_group(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_entry_group_rest_required_fields(request_type=catalog.UpdateEntryGroupRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_group._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry_group._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_entry_group(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_entry_group_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.update_entry_group._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("entryGroup", "updateMask", )))
-
-
-def test_update_entry_group_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_entry_group(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1])
-
-
-def test_update_entry_group_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_entry_group(
-            catalog.UpdateEntryGroupRequest(),
-            entry_group=catalog.EntryGroup(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_delete_entry_group_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CatalogServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_entry_group in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_entry_group] = mock_rpc
-
-        request = {}
-        client.delete_entry_group(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_entry_group(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_entry_group_rest_required_fields(request_type=catalog.DeleteEntryGroupRequest):
-    transport_class = transports.CatalogServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_group._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_group._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("etag", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_entry_group(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_entry_group_rest_unset_required_fields():
-    transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_entry_group._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
-
-
-def test_delete_entry_group_rest_flattened():
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_entry_group(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1])
-
-
-def test_delete_entry_group_rest_flattened_error(transport: str = 'rest'):
-    client = CatalogServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.delete_entry_group( - catalog.DeleteEntryGroupRequest(), - name='name_value', - ) - - -def test_list_entry_groups_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entry_groups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entry_groups] = mock_rpc - - request = {} - client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entry_groups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_entry_groups_rest_required_fields(request_type=catalog.ListEntryGroupsRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_groups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entry_groups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_entry_groups(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_entry_groups_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_entry_groups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_entry_groups_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_entry_groups(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1]) - - -def test_list_entry_groups_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_groups( - catalog.ListEntryGroupsRequest(), - parent='parent_value', - ) - - -def test_list_entry_groups_rest_pager(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
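- # Pagination is simulated below by queueing one faked HTTP Response per
- # page on req.side_effect; iteration stops at the page whose
- # next_page_token is empty. Outside the test harness the same pager is
- # consumed by plain iteration, roughly (process() is hypothetical):
- #   for entry_group in client.list_entry_groups(parent=parent):
- #       process(entry_group)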
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token='abc', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token='ghi', - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_entry_groups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryGroup) - for i in results) - - pages = list(client.list_entry_groups(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_group_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_group in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc - - request = {} - client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_entry_group(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_entry_group_rest_required_fields(request_type=catalog.GetEntryGroupRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_entry_group(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_entry_group_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_entry_group_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = catalog.EntryGroup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_entry_group(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1]) - - -def test_get_entry_group_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_group( - catalog.GetEntryGroupRequest(), - name='name_value', - ) - - -def test_create_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc - - request = {} - client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["entry_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "entryId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == request_init["entry_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["entryId"] = 'entry_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("entry_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == 'entry_id_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
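- # This RPC posts a body, unlike the GET/DELETE stubs above, so the
- # transcode result gains a 'body' key. A real transcode for this method
- # would look roughly like (values illustrative):
- #   {'uri': '.../entryGroups/g/entries', 'method': 'post',
- #    'body': <Entry JSON>, 'query_params': {'entryId': ...}}
- # The required entryId is neither a path nor a body parameter, so it
- # must land in query_params; expected_params asserts that further down.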
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_entry(request) - - expected_params = [ - ( - "entryId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_entry_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_entry._get_unset_required_fields({}) - assert set(unset_fields) == (set(("entryId", )) & set(("parent", "entryId", "entry", ))) - - -def test_create_entry_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - entry=catalog.Entry(name='name_value'), - entry_id='entry_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_entry(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1]) - - -def test_create_entry_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_entry( - catalog.CreateEntryRequest(), - parent='parent_value', - entry=catalog.Entry(name='name_value'), - entry_id='entry_id_value', - ) - - -def test_update_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc - - request = {} - client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "aspect_keys", "delete_missing_aspects", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_entry(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_entry_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_entry._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "aspectKeys", "deleteMissingAspects", "updateMask", )) & set(("entry", ))) - - -def test_update_entry_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - - # get arguments that satisfy an http rule for this method - sample_request = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - entry=catalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_entry(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) - - -def test_update_entry_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_entry( - catalog.UpdateEntryRequest(), - entry=catalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc - - request = {} - client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
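- # Note that delete_entry is not a long-running operation: the faked
- # response below is a catalog.Entry (the deleted entry) rather than an
- # operations_pb2.Operation, unlike delete_entry_group earlier.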
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_entry(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_entry_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_entry._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_entry_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_entry(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) - - -def test_delete_entry_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_entry( - catalog.DeleteEntryRequest(), - name='name_value', - ) - - -def test_list_entries_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc - - request = {} - client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entries._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_entries(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_entries_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_entries_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1]) - - -def test_list_entries_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entries( - catalog.ListEntriesRequest(), - parent='parent_value', - ) - - -def test_list_entries_rest_pager(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
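- # Same side_effect-driven paging as the entry-group pager test above;
- # the final assertions also walk pager.pages and match each raw_page's
- # next_page_token against the queued sequence ('abc', 'def', 'ghi', '').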
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token='abc', - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token='ghi', - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - - pager = client.list_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.Entry) - for i in results) - - pages = list(client.list_entries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc - - request = {} - client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("aspect_types", "paths", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_entry(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_entry_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_entry._get_unset_required_fields({}) - assert set(unset_fields) == (set(("aspectTypes", "paths", "view", )) & set(("name", ))) - - -def test_get_entry_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_entry(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1]) - - -def test_get_entry_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry( - catalog.GetEntryRequest(), - name='name_value', - ) - - -def test_lookup_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lookup_entry in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc - - request = {} - client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lookup_entry(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["entry"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "entry" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup_entry._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "entry" in jsonified_request - assert jsonified_request["entry"] == request_init["entry"] - - jsonified_request["name"] = 'name_value' - jsonified_request["entry"] = 'entry_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("aspect_types", "entry", "paths", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "entry" in jsonified_request - assert jsonified_request["entry"] == 'entry_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = catalog.Entry() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.lookup_entry(request) - - expected_params = [ - ( - "entry", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_lookup_entry_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.lookup_entry._get_unset_required_fields({}) - assert set(unset_fields) == (set(("aspectTypes", "entry", "paths", "view", )) & set(("name", "entry", ))) - - -def test_search_entries_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_entries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc - - request = {} - client.search_entries(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_entries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["query"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "query" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "query" in jsonified_request - assert jsonified_request["query"] == request_init["query"] - - jsonified_request["name"] = 'name_value' - jsonified_request["query"] = 'query_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_entries._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("order_by", "page_size", "page_token", "query", "scope", "semantic_search", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "query" in jsonified_request - assert jsonified_request["query"] == 'query_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
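- # searchEntries transcodes as a POST here, but the stub carries no
- # 'body', so every set field, including the required query, is expected
- # to show up in query_params; hence the ("query", "") entry asserted below.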
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.search_entries(request) - - expected_params = [ - ( - "query", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_search_entries_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.search_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", "query", "scope", "semanticSearch", )) & set(("name", "query", ))) - - -def test_search_entries_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - query='query_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.search_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*}:searchEntries" % client.transport._host, args[1]) - - -def test_search_entries_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_entries( - catalog.SearchEntriesRequest(), - name='name_value', - query='query_value', - ) - - -def test_search_entries_rest_pager(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - next_page_token='abc', - ), - catalog.SearchEntriesResponse( - results=[], - next_page_token='def', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - ], - next_page_token='ghi', - ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'name': 'projects/sample1/locations/sample2'} - - pager = client.search_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.SearchEntriesResult) - for i in results) - - pages = list(client.search_entries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_metadata_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_metadata_job] = mock_rpc - - request = {} - client.create_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. 
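The pager test above stitches four fake pages into a single iterator. A pure-Python model of what the returned pager does (a simplified sketch; the real pager issues one HTTP request per page for as long as next_page_token is non-empty):

    pages = [
        {"results": ["r1", "r2", "r3"], "next_page_token": "abc"},
        {"results": [], "next_page_token": "def"},
        {"results": ["r4"], "next_page_token": "ghi"},
        {"results": ["r5", "r6"], "next_page_token": ""},
    ]

    def flatten(pages):
        # yield every result across page boundaries, in page order
        for page in pages:
            yield from page["results"]

    assert len(list(flatten(pages))) == 6  # matches the test's assertion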
- assert mock_rpc.call_count == 1
-
- # Operation methods build a cached wrapper on the first RPC call;
- # subsequent calls should reuse that cached wrapper.
- wrapper_fn.reset_mock()
-
- client.create_metadata_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_create_metadata_job_rest_required_fields(request_type=catalog.CreateMetadataJobRequest):
- transport_class = transports.CatalogServiceRestTransport
-
- request_init = {}
- request_init["parent"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_metadata_job._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["parent"] = 'parent_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_metadata_job._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixed together.
- assert not set(unset_fields) - set(("metadata_job_id", "validate_only", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "parent" in jsonified_request
- assert jsonified_request["parent"] == 'parent_value'
-
- client = CatalogServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because the real version fails when
- # required fields carry default values but the http_options expect
- # actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
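The use_cached_wrapped_rpc tests assert that RPC wrappers are created once, at client construction, and reused for every later call. A toy model of that caching scheme (hypothetical ToyTransport; the generated transport builds its cache in _prep_wrapped_messages):

    from unittest import mock

    class ToyTransport:
        def __init__(self):
            # wrap each RPC exactly once and cache the wrapper,
            # keyed by the bound method
            self._wrapped_methods = {
                self.create_job: mock.Mock(name="wrapped_create_job"),
            }

        def create_job(self, request):
            raise NotImplementedError  # the cached wrapper is used instead

    transport = ToyTransport()
    wrapper = transport._wrapped_methods[transport.create_job]
    wrapper({})
    wrapper({})
    assert wrapper.call_count == 2  # one cached wrapper served both calls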
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_metadata_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_metadata_job_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(("metadataJobId", "validateOnly", )) & set(("parent", "metadataJob", ))) - - -def test_create_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - metadata_job=catalog.MetadataJob(name='name_value'), - metadata_job_id='metadata_job_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_metadata_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1]) - - -def test_create_metadata_job_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_metadata_job( - catalog.CreateMetadataJobRequest(), - parent='parent_value', - metadata_job=catalog.MetadataJob(name='name_value'), - metadata_job_id='metadata_job_id_value', - ) - - -def test_get_metadata_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_metadata_job] = mock_rpc - - request = {} - client.get_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_metadata_job_rest_required_fields(request_type=catalog.GetMetadataJobRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_metadata_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_metadata_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_metadata_job_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_metadata_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/metadataJobs/*}" % client.transport._host, args[1]) - - -def test_get_metadata_job_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
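Each flattened-call test checks the final request URL against the method's HTTP rule using path_template.validate. A standalone example with illustrative values (the template mirrors the GetMetadataJob rule asserted above):

    from google.api_core import path_template

    tmpl = "https://dataplex.googleapis.com/v1/{name=projects/*/locations/*/metadataJobs/*}"
    good = "https://dataplex.googleapis.com/v1/projects/sample1/locations/sample2/metadataJobs/sample3"
    bad = "https://dataplex.googleapis.com/v1/projects/sample1/locations/sample2"

    assert path_template.validate(tmpl, good)
    assert not path_template.validate(tmpl, bad)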
- with pytest.raises(ValueError): - client.get_metadata_job( - catalog.GetMetadataJobRequest(), - name='name_value', - ) - - -def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_metadata_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_metadata_jobs] = mock_rpc - - request = {} - client.list_metadata_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_metadata_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_metadata_jobs_rest_required_fields(request_type=catalog.ListMetadataJobsRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_metadata_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_metadata_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_metadata_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_metadata_jobs_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_metadata_jobs_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_metadata_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1]) - - -def test_list_metadata_jobs_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_metadata_jobs( - catalog.ListMetadataJobsRequest(), - parent='parent_value', - ) - - -def test_list_metadata_jobs_rest_pager(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - next_page_token='abc', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], - next_page_token='def', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - ], - next_page_token='ghi', - ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_metadata_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.MetadataJob) - for i in results) - - pages = list(client.list_metadata_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_metadata_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_metadata_job] = mock_rpc - - request = {} - client.cancel_metadata_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.cancel_metadata_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_cancel_metadata_job_rest_required_fields(request_type=catalog.CancelMetadataJobRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_metadata_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_metadata_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_metadata_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_metadata_job_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_cancel_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
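CancelMetadataJob maps to google.protobuf.Empty, which is why the fake above sets an empty response body and the client call simply returns None. For reference, an Empty message has no fields and its canonical JSON form is an empty object (standard protobuf behaviour):

    from google.protobuf import empty_pb2, json_format

    # nothing to serialize: the generated client ignores the body entirely
    assert json_format.MessageToJson(empty_pb2.Empty()) == "{}"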
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.cancel_metadata_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" % client.transport._host, args[1]) - - -def test_cancel_metadata_job_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_metadata_job( - catalog.CancelMetadataJobRequest(), - name='name_value', - ) - - -def test_create_entry_link_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entry_link] = mock_rpc - - request = {} - client.create_entry_link(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_entry_link_rest_required_fields(request_type=catalog.CreateEntryLinkRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["entry_link_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "entryLinkId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "entryLinkId" in jsonified_request - assert jsonified_request["entryLinkId"] == request_init["entry_link_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["entryLinkId"] = 'entry_link_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entry_link._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("entry_link_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "entryLinkId" in jsonified_request - assert jsonified_request["entryLinkId"] == 'entry_link_id_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_entry_link(request) - - expected_params = [ - ( - "entryLinkId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_entry_link_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_entry_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(("entryLinkId", )) & set(("parent", "entryLinkId", "entryLink", ))) - - -def test_create_entry_link_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_entry_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks" % client.transport._host, args[1]) - - -def test_create_entry_link_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_entry_link( - catalog.CreateEntryLinkRequest(), - parent='parent_value', - entry_link=catalog.EntryLink(name='name_value'), - entry_link_id='entry_link_id_value', - ) - - -def test_delete_entry_link_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entry_link] = mock_rpc - - request = {} - client.delete_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_entry_link_rest_required_fields(request_type=catalog.DeleteEntryLinkRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entry_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_entry_link(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_entry_link_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_entry_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_entry_link_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_entry_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" % client.transport._host, args[1]) - - -def test_delete_entry_link_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_entry_link( - catalog.DeleteEntryLinkRequest(), - name='name_value', - ) - - -def test_get_entry_link_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entry_link in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc - - request = {} - client.get_entry_link(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entry_link(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_entry_link_rest_required_fields(request_type=catalog.GetEntryLinkRequest): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entry_link._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_entry_link(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_entry_link_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_entry_link._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_entry_link_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_entry_link(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" % client.transport._host, args[1]) - - -def test_get_entry_link_rest_flattened_error(transport: str = 'rest'): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_link( - catalog.GetEntryLinkRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CatalogServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CatalogServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - transports.CatalogServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = CatalogServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_type(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - call.return_value = catalog.ListEntryTypesResponse() - client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - call.return_value = catalog.EntryType() - client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
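These gRPC empty-call failsafes invoke each method with request=None and assert that the stub still received a well-formed, default-constructed request message. The generated client coerces None into an empty request, so every field sits at its proto3 default:

    from google.cloud.dataplex_v1.types import catalog

    # request=None becomes catalog.GetEntryTypeRequest(), i.e. all defaults;
    # an explicitly defaulted field compares equal to an untouched one
    assert catalog.GetEntryTypeRequest() == catalog.GetEntryTypeRequest(name="")
    assert catalog.GetEntryTypeRequest().name == ""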
- with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_aspect_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - call.return_value = catalog.ListAspectTypesResponse() - client.list_aspect_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - call.return_value = catalog.AspectType() - client.get_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_groups_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = catalog.ListEntryGroupsResponse() - client.list_entry_groups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = catalog.EntryGroup() - client.get_entry_group(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.create_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.update_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.delete_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = catalog.ListEntriesResponse() - client.list_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.get_entry(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lookup_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - call.return_value = catalog.Entry() - client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - call.return_value = catalog.SearchEntriesResponse() - client.search_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - call.return_value = catalog.MetadataJob() - client.get_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_metadata_jobs_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - call.return_value = catalog.ListMetadataJobsResponse() - client.list_metadata_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - call.return_value = None - client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_link_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.create_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_link_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.delete_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_link_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - call.return_value = catalog.EntryLink() - client.get_entry_link(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryLinkRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_entry_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - )) - await client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_aspect_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListAspectTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_aspect_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.AspectType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - )) - await client.get_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entry_groups_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_entry_groups(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryGroup( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - )) - await client.get_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.create_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.update_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.delete_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListEntriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.get_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_lookup_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - )) - await client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.SearchEntriesResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.search_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob( - name='name_value', - uid='uid_value', - type_=catalog.MetadataJob.Type.IMPORT, - )) - await client.get_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_metadata_jobs_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.ListMetadataJobsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_metadata_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_link_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - await client.create_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_link_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - await client.delete_entry_link(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_link_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - )) - await client.get_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryLinkRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = CatalogServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_entry_type_rest_bad_request(request_type=catalog.CreateEntryTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryTypeRequest, - dict, -]) -def test_create_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["entry_type"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'type_aliases': ['type_aliases_value1', 'type_aliases_value2'], 'platform': 'platform_value', 'system': 'system_value', 'required_aspects': [{'type_': 'type__value'}], 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] - else: - del request_init["entry_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_type(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.CreateEntryTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_entry_type_rest_bad_request(request_type=catalog.UpdateEntryTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryTypeRequest, - dict, -]) -def test_update_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'entry_type': {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'}} - request_init["entry_type"] = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'type_aliases': ['type_aliases_value1', 'type_aliases_value2'], 'platform': 'platform_value', 'system': 'system_value', 'required_aspects': [{'type_': 'type__value'}], 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] - else: - del request_init["entry_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_type(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.UpdateEntryTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_type_rest_bad_request(request_type=catalog.DeleteEntryTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryTypeRequest, - dict, -]) -def test_delete_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_type(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.DeleteEntryTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_types(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListEntryTypesRequest, - dict, -]) -def test_list_entry_types_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEntryTypesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_types_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_types") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entry_types") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntryTypesResponse.to_json(catalog.ListEntryTypesResponse()) - req.return_value.content = return_value - - request = catalog.ListEntryTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.ListEntryTypesResponse() - post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata - - client.list_entry_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryTypeRequest, - dict, -]) -def test_get_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - type_aliases=['type_aliases_value'], - platform='platform_value', - system='system_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.type_aliases == ['type_aliases_value'] - assert response.platform == 'platform_value' - assert response.system == 'system_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryType.to_json(catalog.EntryType()) - req.return_value.content = return_value - - request = catalog.GetEntryTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.EntryType() - post_with_metadata.return_value = catalog.EntryType(), metadata - - client.get_entry_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_aspect_type_rest_bad_request(request_type=catalog.CreateAspectTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = 
request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_aspect_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateAspectTypeRequest, - dict, -]) -def test_create_aspect_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["aspect_type"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}, 'metadata_template': {'index': 536, 'name': 'name_value', 'type_': 'type__value', 'record_fields': {}, 'enum_values': [{'index': 536, 'name': 'name_value', 'deprecated': 'deprecated_value'}], 'map_items': {}, 'array_items': {}, 'type_id': 'type_id_value', 'type_ref': 'type_ref_value', 'constraints': {'required': True}, 'annotations': {'deprecated': 'deprecated_value', 'display_name': 'display_name_value', 'description': 'description_value', 'display_order': 1393, 'string_type': 'string_type_value', 'string_values': ['string_values_value1', 'string_values_value2']}}, 'transfer_status': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] - else: - del request_init["aspect_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_aspect_type(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_aspect_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_aspect_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_aspect_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_aspect_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateAspectTypeRequest.pb(catalog.CreateAspectTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.CreateAspectTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_aspect_type_rest_bad_request(request_type=catalog.UpdateAspectTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_aspect_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateAspectTypeRequest, - dict, -]) -def test_update_aspect_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'aspect_type': {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'}} - request_init["aspect_type"] = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'authorization': {'alternate_use_permission': 'alternate_use_permission_value'}, 'metadata_template': {'index': 536, 'name': 'name_value', 'type_': 'type__value', 'record_fields': {}, 'enum_values': [{'index': 536, 'name': 'name_value', 'deprecated': 'deprecated_value'}], 'map_items': {}, 'array_items': {}, 'type_id': 'type_id_value', 'type_ref': 'type_ref_value', 'constraints': {'required': True}, 'annotations': {'deprecated': 'deprecated_value', 'display_name': 'display_name_value', 'description': 'description_value', 'display_order': 1393, 'string_type': 'string_type_value', 'string_values': ['string_values_value1', 'string_values_value2']}}, 'transfer_status': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] - else: - del request_init["aspect_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_aspect_type(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_aspect_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_aspect_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_aspect_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_aspect_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateAspectTypeRequest.pb(catalog.UpdateAspectTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.UpdateAspectTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_aspect_type_rest_bad_request(request_type=catalog.DeleteAspectTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_aspect_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteAspectTypeRequest, - dict, -]) -def test_delete_aspect_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_aspect_type(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_aspect_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_aspect_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_aspect_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteAspectTypeRequest.pb(catalog.DeleteAspectTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.DeleteAspectTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_aspect_types_rest_bad_request(request_type=catalog.ListAspectTypesRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_aspect_types(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListAspectTypesRequest, - dict, -]) -def test_list_aspect_types_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_aspect_types(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAspectTypesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_aspect_types_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_aspect_types") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_aspect_types") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListAspectTypesResponse.to_json(catalog.ListAspectTypesResponse()) - req.return_value.content = return_value - - request = catalog.ListAspectTypesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.ListAspectTypesResponse() - post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata - - client.list_aspect_types(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_aspect_type(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetAspectTypeRequest, - dict, -]) -def test_get_aspect_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/aspectTypes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.AspectType( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_aspect_type(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.AspectType) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_aspect_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_aspect_type") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_aspect_type") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.AspectType.to_json(catalog.AspectType()) - req.return_value.content = return_value - - request = catalog.GetAspectTypeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.AspectType() - post_with_metadata.return_value = catalog.AspectType(), metadata - - client.get_aspect_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_entry_group_rest_bad_request(request_type=catalog.CreateEntryGroupRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a 
BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_group(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryGroupRequest, - dict, -]) -def test_create_entry_group_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["entry_group"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'transfer_status': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] - else: - del request_init["entry_group"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_group(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_group") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_group_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_group") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryGroupRequest.pb(catalog.CreateEntryGroupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.CreateEntryGroupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_entry_group_rest_bad_request(request_type=catalog.UpdateEntryGroupRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_group(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryGroupRequest, - dict, -]) -def test_update_entry_group_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'entry_group': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'}} - request_init["entry_group"] = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'transfer_status': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] - else: - del request_init["entry_group"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_group(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_group") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_group_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry_group") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryGroupRequest.pb(catalog.UpdateEntryGroupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.UpdateEntryGroupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_group_rest_bad_request(request_type=catalog.DeleteEntryGroupRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_group(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryGroupRequest, - dict, -]) -def test_delete_entry_group_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_group(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_group") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_group_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_group") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryGroupRequest.pb(catalog.DeleteEntryGroupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.DeleteEntryGroupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_entry_groups_rest_bad_request(request_type=catalog.ListEntryGroupsRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_groups(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListEntryGroupsRequest, - dict, -]) -def test_list_entry_groups_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_groups(request) - - # Establish that the response is the type that we expect. 
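Every `*_rest_call_success` test manufactures its wire response the same way: build a proto-plus message, unwrap it with `.pb()`, serialize via `json_format.MessageToJson`, and hand the UTF-8 bytes to the mocked session as `content`. A condensed sketch of the round trip, assuming the same `catalog` types import this module uses:

    from google.protobuf import json_format
    from google.cloud.dataplex_v1.types import catalog

    # proto-plus wrapper -> protobuf message -> JSON string -> UTF-8 bytes.
    msg = catalog.ListEntryGroupsResponse(next_page_token='token')
    payload = json_format.MessageToJson(catalog.ListEntryGroupsResponse.pb(msg))
    content = payload.encode('UTF-8')  # what the test assigns to response_value.content

    # The REST transport reverses the process when parsing the faked response.
    parsed = catalog.ListEntryGroupsResponse.pb(catalog.ListEntryGroupsResponse())
    json_format.Parse(payload, parsed)
    assert parsed.next_page_token == 'token'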
- assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_groups_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_groups") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entry_groups") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntryGroupsResponse.to_json(catalog.ListEntryGroupsResponse()) - req.return_value.content = return_value - - request = catalog.ListEntryGroupsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.ListEntryGroupsResponse() - post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata - - client.list_entry_groups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_group(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryGroupRequest, - dict, -]) -def test_get_entry_group_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
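The `list_entry_groups` assertions above check for a `ListEntryGroupsPager` rather than the raw response: the pager proxies `next_page_token` and `unreachable_locations` from the first response and fetches follow-up pages on demand. Typical application-side usage, with a hypothetical parent and real credentials assumed:

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    # Iterating the pager lazily issues further ListEntryGroups calls
    # whenever the current page is exhausted and a next_page_token is set.
    for entry_group in client.list_entry_groups(
        parent="projects/my-project/locations/us-central1"
    ):
        print(entry_group.name)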
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_group(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryGroup) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_group") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_group") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryGroup.to_json(catalog.EntryGroup()) - req.return_value.content = return_value - - request = catalog.GetEntryGroupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.EntryGroup() - post_with_metadata.return_value = catalog.EntryGroup(), metadata - - client.get_entry_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryRequest, - dict, -]) -def test_create_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request_init["entry"] = {'name': 'name_value', 'entry_type': 'entry_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'aspects': {}, 'parent_entry': 'parent_entry_value', 'fully_qualified_name': 'fully_qualified_name_value', 'entry_source': {'resource': 'resource_value', 'system': 'system_value', 'platform': 'platform_value', 'display_name': 'display_name_value', 'description': 'description_value', 'labels': {}, 'ancestors': [{'name': 'name_value', 'type_': 'type__value'}], 'create_time': {}, 'update_time': {}, 'location': 'location_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list.
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del request_init["entry"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry(request) - - # Establish that the response is the type that we expect. 
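The pruning block above (repeated for every request in this file that carries a message body) exists because the protos installed at test time can be older than the ones the sample `request_init` dict was generated against, so any nested field unknown to the runtime is deleted before the request is built (see gapic-generator-python#1748, linked above). The proto-plus versus protobuf split it navigates can be reproduced directly; a small sketch using the same `meta.fields` introspection:

    from google.cloud.dataplex_v1.types import catalog

    # proto-plus classes describe their schema via `meta.fields`, while
    # vanilla protobuf classes expose `DESCRIPTOR.fields`; the helper above
    # picks whichever attribute is present.
    entry_field = catalog.CreateEntryRequest.meta.fields["entry"]
    if hasattr(entry_field.message, "DESCRIPTOR"):  # plain protobuf type
        names = [f.name for f in entry_field.message.DESCRIPTOR.fields]
    else:                                           # proto-plus type
        names = list(entry_field.message.meta.fields)
    print("Entry fields known to this runtime:", names)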
- assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) - req.return_value.content = return_value - - request = catalog.CreateEntryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata - - client.create_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
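In the interceptor tests, `path_template.transcode` is stubbed with a canned `method`/`uri`/`body`/`query_params` dict; in a real call that mapping is derived from the method's HTTP rule. A sketch of the un-stubbed behavior; the rule below is illustrative rather than quoted from the service config:

    from google.api_core import path_template

    http_options = [{
        "method": "post",
        "uri": "/v1/{parent=projects/*/locations/*/entryGroups/*}/entries",
        "body": "entry",
    }]
    transcoded = path_template.transcode(
        http_options,
        parent="projects/p/locations/l/entryGroups/g",
        entry={"name": "projects/p/locations/l/entryGroups/g/entries/e"},
    )
    # transcoded["uri"]  -> "/v1/projects/p/locations/l/entryGroups/g/entries"
    # transcoded["body"] -> the entry dict; leftover fields become query_params.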
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.UpdateEntryRequest, - dict, -]) -def test_update_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'entry': {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'}} - request_init["entry"] = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4', 'entry_type': 'entry_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'aspects': {}, 'parent_entry': 'parent_entry_value', 'fully_qualified_name': 'fully_qualified_name_value', 'entry_source': {'resource': 'resource_value', 'system': 'system_value', 'platform': 'platform_value', 'display_name': 'display_name_value', 'description': 'description_value', 'labels': {}, 'ancestors': [{'name': 'name_value', 'type_': 'type__value'}], 'create_time': {}, 'update_time': {}, 'location': 'location_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del request_init["entry"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry(request) - - # Establish that the response is the type that we expect. 
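The update tests populate `request_init` with the full resource plus the route-bearing `entry.name`; in application code the flattened helper with an explicit field mask is the more common shape. A hypothetical call, assuming the generated `entry`/`update_mask` flattening and real credentials:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.CatalogServiceClient()
    entry = dataplex_v1.Entry(
        name="projects/my-project/locations/us-central1/entryGroups/g/entries/e",
        fully_qualified_name="bigquery:my-project.dataset.table",
    )
    # Only the masked paths are written; everything else is left untouched.
    client.update_entry(
        entry=entry,
        update_mask=field_mask_pb2.FieldMask(paths=["fully_qualified_name"]),
    )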
- assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_update_entry") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) - req.return_value.content = return_value - - request = catalog.UpdateEntryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata - - client.update_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryRequest, - dict, -]) -def test_delete_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) - req.return_value.content = return_value - - request = catalog.DeleteEntryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata - - client.delete_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entries(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListEntriesRequest, - dict, -]) -def test_list_entries_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entries(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entries_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entries") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntriesResponse.to_json(catalog.ListEntriesResponse()) - req.return_value.content = return_value - - request = catalog.ListEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.ListEntriesResponse() - post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata - -
- client.list_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryRequest, - dict, -]) -def test_get_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry(request) - - # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) - req.return_value.content = return_value - - request = catalog.GetEntryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata - - client.get_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lookup_entry(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.LookupEntryRequest, - dict, -]) -def test_lookup_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
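The recurring `("key", "val")` and `("cephalopod", "squid")` pairs are arbitrary per-call metadata: on the REST transport they are folded into the HTTP headers, after first passing through the `pre_*` hook (which is why the tests stub `pre.return_value = request, metadata`). A hypothetical call attaching custom metadata; the header name and value are illustrative:

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    client.get_entry(
        name="projects/my-project/locations/us-central1/entryGroups/g/entries/e",
        metadata=[("x-example-audit", "abc123")],
    )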
- return_value = catalog.Entry( - name='name_value', - entry_type='entry_type_value', - parent_entry='parent_entry_value', - fully_qualified_name='fully_qualified_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lookup_entry(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) - assert response.name == 'name_value' - assert response.entry_type == 'entry_type_value' - assert response.parent_entry == 'parent_entry_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_entry_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_lookup_entry") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_lookup_entry") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) - req.return_value.content = return_value - - request = catalog.LookupEntryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata - - client.lookup_entry(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
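Note the different shape of the lookup tests: unlike `get_entry`, `lookup_entry` is addressed to a scoping resource (`name` is just a project/location, as in the `request_init` above) and the target entry travels in the request's `entry` field. A hypothetical lookup; `lookup_entry` has no flattened parameters, so the request object is passed whole:

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    request = dataplex_v1.LookupEntryRequest(
        name="projects/my-project/locations/us-central1",
        entry="projects/my-project/locations/us-central1/entryGroups/g/entries/e",
    )
    entry = client.lookup_entry(request=request)
    print(entry.fully_qualified_name)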
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_entries(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.SearchEntriesRequest, - dict, -]) -def test_search_entries_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_entries(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchEntriesPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_entries_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_search_entries") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_search_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.SearchEntriesResponse.to_json(catalog.SearchEntriesResponse()) - req.return_value.content = return_value - - request = catalog.SearchEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.SearchEntriesResponse() - post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata - - client.search_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_metadata_job_rest_bad_request(request_type=catalog.CreateMetadataJobRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
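`search_entries` is paginated like the list methods (hence the `SearchEntriesPager` assertion with `total_size`, `next_page_token`, and `unreachable`), but it is driven by a query string. A hypothetical search; the scope, the query predicate, and the `dataplex_entry` result field are assumptions about the v1 surface rather than values taken from this diff:

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    pager = client.search_entries(
        request=dataplex_v1.SearchEntriesRequest(
            name="projects/my-project/locations/global",
            query="displayname:orders",
        )
    )
    for result in pager:
        print(result.dataplex_entry.name)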
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_metadata_job(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateMetadataJobRequest, - dict, -]) -def test_create_metadata_job_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["metadata_job"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'type_': 1, 'import_spec': {'source_storage_uri': 'source_storage_uri_value', 'source_create_time': {}, 'scope': {'entry_groups': ['entry_groups_value1', 'entry_groups_value2'], 'entry_types': ['entry_types_value1', 'entry_types_value2'], 'aspect_types': ['aspect_types_value1', 'aspect_types_value2'], 'glossaries': ['glossaries_value1', 'glossaries_value2'], 'entry_link_types': ['entry_link_types_value1', 'entry_link_types_value2'], 'referenced_entry_scopes': ['referenced_entry_scopes_value1', 'referenced_entry_scopes_value2']}, 'entry_sync_mode': 1, 'aspect_sync_mode': 1, 'log_level': 1}, 'export_spec': {'scope': {'organization_level': True, 'projects': ['projects_value1', 'projects_value2'], 'entry_groups': ['entry_groups_value1', 'entry_groups_value2'], 'entry_types': ['entry_types_value1', 'entry_types_value2'], 'aspect_types': ['aspect_types_value1', 'aspect_types_value2']}, 'output_path': 'output_path_value'}, 'import_result': {'deleted_entries': 1584, 'updated_entries': 1600, 'created_entries': 1585, 'unchanged_entries': 1798, 'recreated_entries': 1800, 'update_time': {}, 'deleted_entry_links': 2024, 'created_entry_links': 2025, 'unchanged_entry_links': 2238}, 'export_result': {'exported_entries': 1732, 'error_message': 'error_message_value'}, 'status': {'state': 1, 'message': 'message_value', 'completion_percent': 1930, 'update_time': {}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["metadata_job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["metadata_job"][field])): - del request_init["metadata_job"][field][i][subfield] - else: - del request_init["metadata_job"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_metadata_job(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_metadata_job_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_metadata_job") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_metadata_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_metadata_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateMetadataJobRequest.pb(catalog.CreateMetadataJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.CreateMetadataJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_metadata_job(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetMetadataJobRequest, - dict, -]) -def test_get_metadata_job_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
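`create_metadata_job` is a long-running operation: the success test fakes a bare `operations_pb2.Operation`, and the interceptor test additionally patches `operation.Operation._set_result_from_operation` so no poller runs. The large `request_init` above also shows the import-spec surface, including the newer `glossaries`, `entry_link_types`, and `referenced_entry_scopes` scope fields. A sketch of starting an import job and blocking on it; bucket, project, and scope values are placeholders:

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    job = dataplex_v1.MetadataJob(
        type_=dataplex_v1.MetadataJob.Type.IMPORT,
        import_spec=dataplex_v1.MetadataJob.ImportJobSpec(
            source_storage_uri="gs://my-bucket/metadata/",
            scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
                entry_groups=["projects/p/locations/us-central1/entryGroups/g"],
            ),
            entry_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.FULL,
            aspect_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.INCREMENTAL,
        ),
    )
    operation = client.create_metadata_job(
        parent="projects/p/locations/us-central1", metadata_job=job
    )
    print(operation.result().status.state)  # blocks until the job completes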
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob( - name='name_value', - uid='uid_value', - type_=catalog.MetadataJob.Type.IMPORT, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_metadata_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.MetadataJob) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.type_ == catalog.MetadataJob.Type.IMPORT - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_metadata_job_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_metadata_job") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_metadata_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) - req.return_value.content = return_value - - request = catalog.GetMetadataJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.MetadataJob() - post_with_metadata.return_value = catalog.MetadataJob(), metadata - - client.get_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_metadata_jobs_rest_bad_request(request_type=catalog.ListMetadataJobsRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_metadata_jobs(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.ListMetadataJobsRequest, - dict, -]) -def test_list_metadata_jobs_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_metadata_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMetadataJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_metadata_jobs_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.ListMetadataJobsRequest.pb(catalog.ListMetadataJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListMetadataJobsResponse.to_json(catalog.ListMetadataJobsResponse()) - req.return_value.content = return_value - - request = catalog.ListMetadataJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.ListMetadataJobsResponse() - post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata - - client.list_metadata_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_cancel_metadata_job_rest_bad_request(request_type=catalog.CancelMetadataJobRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
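The `ListMetadataJobsPager` assertion above is the visible half of the paging contract: the client method returns a pager that lazily follows `next_page_token`, so callers normally just iterate it. A minimal usage sketch (the project and location values are placeholders):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    pager = client.list_metadata_jobs(
        parent="projects/my-project/locations/us-central1",  # placeholder resource name
    )
    for job in pager:  # subsequent pages are fetched on demand
        print(job.name, job.type_)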
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_metadata_job(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CancelMetadataJobRequest, - dict, -]) -def test_cancel_metadata_job_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/metadataJobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_metadata_job(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_metadata_job_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_cancel_metadata_job") as pre: - pre.assert_not_called() - pb_message = catalog.CancelMetadataJobRequest.pb(catalog.CancelMetadataJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = catalog.CancelMetadataJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.cancel_metadata_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_create_entry_link_rest_bad_request(request_type=catalog.CreateEntryLinkRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
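As the cancel test just above asserts, `CancelMetadataJob` carries no response payload, so the client method returns `None`. A hedged usage sketch (placeholder resource name; the flattened `name` argument is assumed from the generated surface):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    client.cancel_metadata_job(
        name="projects/my-project/locations/us-central1/metadataJobs/my-job",
    )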
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_link(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.CreateEntryLinkRequest, - dict, -]) -def test_create_entry_link_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/entryGroups/sample3'} - request_init["entry_link"] = {'name': 'name_value', 'entry_link_type': 'entry_link_type_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'entry_references': [{'name': 'name_value', 'path': 'path_value', 'type_': 2}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryLinkRequest.meta.fields["entry_link"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["entry_link"][field])): - del request_init["entry_link"][field][i][subfield] - else: - del request_init["entry_link"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_link(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_link_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_link") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_create_entry_link_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_create_entry_link") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryLinkRequest.pb(catalog.CreateEntryLinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryLink.to_json(catalog.EntryLink()) - req.return_value.content = return_value - - request = catalog.CreateEntryLinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.EntryLink() - post_with_metadata.return_value = catalog.EntryLink(), metadata - - client.create_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_link_rest_bad_request(request_type=catalog.DeleteEntryLinkRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake 
a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_link(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.DeleteEntryLinkRequest, - dict, -]) -def test_delete_entry_link_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_link(request) - - # Establish that the response is the type that we expect. 
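Stepping back to the `create_entry_link` success case above: its `request_init` payload spells out the `EntryLink` shape, a link type plus `entry_references` entries carrying `name`, `path`, and `type_`. A hedged sketch of the same request built from typed messages (the `EntryLink.EntryReference` nested type and the `entry_link_id` field are inferred from that payload and from standard create semantics, not confirmed by this diff):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    request = dataplex_v1.CreateEntryLinkRequest(
        parent="projects/my-project/locations/us-central1/entryGroups/my-group",
        entry_link_id="my-entry-link",  # assumed create-time ID field
        entry_link=dataplex_v1.EntryLink(
            entry_link_type="entry_link_type_value",  # placeholder, as in the test
            entry_references=[
                dataplex_v1.EntryLink.EntryReference(
                    name="name_value", path="path_value", type_=2,
                ),
            ],
        ),
    )
    response = client.create_entry_link(request=request)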
- assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_link_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_link") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_delete_entry_link_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_delete_entry_link") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryLinkRequest.pb(catalog.DeleteEntryLinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryLink.to_json(catalog.EntryLink()) - req.return_value.content = return_value - - request = catalog.DeleteEntryLinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.EntryLink() - post_with_metadata.return_value = catalog.EntryLink(), metadata - - client.delete_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_entry_link_rest_bad_request(request_type=catalog.GetEntryLinkRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_link(request) - - -@pytest.mark.parametrize("request_type", [ - catalog.GetEntryLinkRequest, - dict, -]) -def test_get_entry_link_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
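One detail worth flagging in the `delete_entry_link` assertions above: unlike most `Delete*` RPCs, this one returns the deleted `EntryLink` itself rather than `Empty` or a long-running operation. A hedged usage sketch (placeholder resource name; the flattened `name` argument is assumed):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()
    deleted = client.delete_entry_link(
        name="projects/my-project/locations/us-central1/entryGroups/my-group/entryLinks/my-link",
    )
    print(deleted.entry_link_type)  # the full resource is echoed back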
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = catalog.EntryLink( - name='name_value', - entry_link_type='entry_link_type_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_link(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, catalog.EntryLink) - assert response.name == 'name_value' - assert response.entry_link_type == 'entry_link_type_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_link_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_link") as post, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "post_get_entry_link_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CatalogServiceRestInterceptor, "pre_get_entry_link") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryLinkRequest.pb(catalog.GetEntryLinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryLink.to_json(catalog.EntryLink()) - req.return_value.content = return_value - - request = catalog.GetEntryLinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = catalog.EntryLink() - post_with_metadata.return_value = catalog.EntryLink(), metadata - - client.get_entry_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
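These `get_location`/`list_locations` cases cover the standard mixin surface exposed alongside the Dataplex RPCs; note that they take raw `locations_pb2` request messages rather than proto-plus types. A usage sketch mirroring the test request (resource name is a placeholder):

    from google.cloud import dataplex_v1
    from google.cloud.location import locations_pb2

    client = dataplex_v1.CatalogServiceClient()
    location = client.get_location(
        locations_pb2.GetLocationRequest(name="projects/my-project/locations/us-central1"),
    )
    print(location.location_id)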
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), - '__call__') as call: - client.create_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), - '__call__') as call: - client.update_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_type), - '__call__') as call: - client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_types_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_types), - '__call__') as call: - client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_type), - '__call__') as call: - client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_aspect_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), - '__call__') as call: - client.create_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_aspect_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), - '__call__') as call: - client.update_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_aspect_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_aspect_type), - '__call__') as call: - client.delete_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_aspect_types_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), - '__call__') as call: - client.list_aspect_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_aspect_type_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_aspect_type), - '__call__') as call: - client.get_aspect_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_group_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_group_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - client.update_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_group_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_groups_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - client.list_entry_groups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - client.get_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - client.create_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - client.update_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - client.delete_entry(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entries_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - client.list_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - client.get_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lookup_entry_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_entries_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_entries), - '__call__') as call: - client.search_entries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_metadata_job_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), - '__call__') as call: - client.create_metadata_job(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_metadata_job_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_metadata_job), - '__call__') as call: - client.get_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_metadata_jobs_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), - '__call__') as call: - client.list_metadata_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_metadata_job_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), - '__call__') as call: - client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_link_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_link), - '__call__') as call: - client.create_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_link_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_link), - '__call__') as call: - client.delete_entry_link(request=None) - - # Establish that the underlying stub method was called. 
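The long run of `*_empty_call_rest` functions here is generated one-per-RPC as a coverage failsafe. A hand-written suite could express the same check once via parametrization; a sketch using only names that appear in these tests:

    from unittest import mock

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.types import catalog

    @pytest.mark.parametrize("method_name,request_type", [
        ("create_entry_link", catalog.CreateEntryLinkRequest),
        ("delete_entry_link", catalog.DeleteEntryLinkRequest),
        ("get_entry_link", catalog.GetEntryLinkRequest),
    ])
    def test_empty_call_rest(method_name, request_type):
        client = dataplex_v1.CatalogServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )
        # Mock the underlying transport callable and invoke with request=None.
        with mock.patch.object(
            type(getattr(client.transport, method_name)), "__call__",
        ) as call:
            getattr(client, method_name)(request=None)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # An empty call must materialize a default request message.
        assert args[0] == request_type()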
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryLinkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_link_empty_call_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_link), - '__call__') as call: - client.get_entry_link(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryLinkRequest() - - assert args[0] == request_msg - - -def test_catalog_service_rest_lro_client(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CatalogServiceGrpcTransport, - ) - -def test_catalog_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CatalogServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_catalog_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CatalogServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_entry_type', - 'update_entry_type', - 'delete_entry_type', - 'list_entry_types', - 'get_entry_type', - 'create_aspect_type', - 'update_aspect_type', - 'delete_aspect_type', - 'list_aspect_types', - 'get_aspect_type', - 'create_entry_group', - 'update_entry_group', - 'delete_entry_group', - 'list_entry_groups', - 'get_entry_group', - 'create_entry', - 'update_entry', - 'delete_entry', - 'list_entries', - 'get_entry', - 'lookup_entry', - 'search_entries', - 'create_metadata_job', - 'get_metadata_job', - 'list_metadata_jobs', - 'cancel_metadata_job', - 'create_entry_link', - 'delete_entry_link', - 'get_entry_link', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_catalog_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CatalogServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_catalog_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CatalogServiceTransport() - adc.assert_called_once() - - -def test_catalog_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CatalogServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - ], -) -def test_catalog_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
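The ADC tests above pin down what the transports forward to `google.auth.default`: the caller's scopes, the cloud-platform default scope, and the quota project. Outside of tests, that resolution looks roughly like this (a hedged sketch; it requires ambient credentials at runtime, and the quota project value is the test's placeholder):

    import google.auth

    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
        quota_project_id="octopus",  # placeholder, mirroring the test value
    )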
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - transports.CatalogServiceRestTransport, - ], -) -def test_catalog_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CatalogServiceGrpcTransport, grpc_helpers), - (transports.CatalogServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_catalog_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) -def test_catalog_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_catalog_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CatalogServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_catalog_service_host_no_port(transport_name): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_catalog_service_host_with_port(transport_name): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_catalog_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CatalogServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CatalogServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_entry_type._session - session2 = client2.transport.create_entry_type._session - assert session1 != session2 - session1 = client1.transport.update_entry_type._session - session2 = client2.transport.update_entry_type._session - assert session1 != session2 - session1 = client1.transport.delete_entry_type._session - session2 = client2.transport.delete_entry_type._session - assert session1 != session2 - session1 = client1.transport.list_entry_types._session - session2 = client2.transport.list_entry_types._session - assert session1 != session2 - session1 = client1.transport.get_entry_type._session - session2 = client2.transport.get_entry_type._session - assert session1 != session2 - session1 = client1.transport.create_aspect_type._session - session2 = client2.transport.create_aspect_type._session - assert session1 != session2 - session1 = client1.transport.update_aspect_type._session - session2 = client2.transport.update_aspect_type._session - assert session1 != session2 - session1 = client1.transport.delete_aspect_type._session - session2 = client2.transport.delete_aspect_type._session - assert session1 != session2 - session1 = client1.transport.list_aspect_types._session
- session2 = client2.transport.list_aspect_types._session - assert session1 != session2 - session1 = client1.transport.get_aspect_type._session - session2 = client2.transport.get_aspect_type._session - assert session1 != session2 - session1 = client1.transport.create_entry_group._session - session2 = client2.transport.create_entry_group._session - assert session1 != session2 - session1 = client1.transport.update_entry_group._session - session2 = client2.transport.update_entry_group._session - assert session1 != session2 - session1 = client1.transport.delete_entry_group._session - session2 = client2.transport.delete_entry_group._session - assert session1 != session2 - session1 = client1.transport.list_entry_groups._session - session2 = client2.transport.list_entry_groups._session - assert session1 != session2 - session1 = client1.transport.get_entry_group._session - session2 = client2.transport.get_entry_group._session - assert session1 != session2 - session1 = client1.transport.create_entry._session - session2 = client2.transport.create_entry._session - assert session1 != session2 - session1 = client1.transport.update_entry._session - session2 = client2.transport.update_entry._session - assert session1 != session2 - session1 = client1.transport.delete_entry._session - session2 = client2.transport.delete_entry._session - assert session1 != session2 - session1 = client1.transport.list_entries._session - session2 = client2.transport.list_entries._session - assert session1 != session2 - session1 = client1.transport.get_entry._session - session2 = client2.transport.get_entry._session - assert session1 != session2 - session1 = client1.transport.lookup_entry._session - session2 = client2.transport.lookup_entry._session - assert session1 != session2 - session1 = client1.transport.search_entries._session - session2 = client2.transport.search_entries._session - assert session1 != session2 - session1 = client1.transport.create_metadata_job._session - session2 = client2.transport.create_metadata_job._session - assert session1 != session2 - session1 = client1.transport.get_metadata_job._session - session2 = client2.transport.get_metadata_job._session - assert session1 != session2 - session1 = client1.transport.list_metadata_jobs._session - session2 = client2.transport.list_metadata_jobs._session - assert session1 != session2 - session1 = client1.transport.cancel_metadata_job._session - session2 = client2.transport.cancel_metadata_job._session - assert session1 != session2 - session1 = client1.transport.create_entry_link._session - session2 = client2.transport.create_entry_link._session - assert session1 != session2 - session1 = client1.transport.delete_entry_link._session - session2 = client2.transport.delete_entry_link._session - assert session1 != session2 - session1 = client1.transport.get_entry_link._session - session2 = client2.transport.get_entry_link._session - assert session1 != session2 -def test_catalog_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided.
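The channel-injection behavior this test checks can be sketched in application terms as follows; the endpoint values are illustrative, and the transport adopts the supplied channel rather than creating its own:

    import grpc
    from google.cloud.dataplex_v1.services.catalog_service import transports

    # Hand a pre-built channel to the transport; it is used as-is.
    channel = grpc.secure_channel("localhost:8080", grpc.local_channel_credentials())
    transport = transports.CatalogServiceGrpcTransport(
        host="dataplex.googleapis.com",
        channel=channel,
    )
    assert transport.grpc_channel == channel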
- transport = transports.CatalogServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_catalog_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CatalogServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) -def test_catalog_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.CatalogServiceGrpcTransport, transports.CatalogServiceGrpcAsyncIOTransport]) -def test_catalog_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_catalog_service_grpc_lro_client(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_catalog_service_grpc_lro_async_client(): - client = CatalogServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_aspect_type_path(): - project = "squid" - location = "clam" - aspect_type = "whelk" - expected = "projects/{project}/locations/{location}/aspectTypes/{aspect_type}".format(project=project, location=location, aspect_type=aspect_type, ) - actual = CatalogServiceClient.aspect_type_path(project, location, aspect_type) - assert expected == actual - - -def test_parse_aspect_type_path(): - expected = { - "project": "octopus", - "location": "oyster", - "aspect_type": "nudibranch", - } - path = CatalogServiceClient.aspect_type_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_aspect_type_path(path) - assert expected == actual - -def test_entry_path(): - project = "cuttlefish" - location = "mussel" - entry_group = "winkle" - entry = "nautilus" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - actual = CatalogServiceClient.entry_path(project, location, entry_group, entry) - assert expected == actual - - -def test_parse_entry_path(): - expected = { - "project": "scallop", - "location": "abalone", - "entry_group": "squid", - "entry": "clam", - } - path = CatalogServiceClient.entry_path(**expected) - - # Check that the path construction is reversible. 
- actual = CatalogServiceClient.parse_entry_path(path) - assert expected == actual - -def test_entry_group_path(): - project = "whelk" - location = "octopus" - entry_group = "oyster" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - actual = CatalogServiceClient.entry_group_path(project, location, entry_group) - assert expected == actual - - -def test_parse_entry_group_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "entry_group": "mussel", - } - path = CatalogServiceClient.entry_group_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_entry_group_path(path) - assert expected == actual - -def test_entry_link_path(): - project = "winkle" - location = "nautilus" - entry_group = "scallop" - entry_link = "abalone" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format(project=project, location=location, entry_group=entry_group, entry_link=entry_link, ) - actual = CatalogServiceClient.entry_link_path(project, location, entry_group, entry_link) - assert expected == actual - - -def test_parse_entry_link_path(): - expected = { - "project": "squid", - "location": "clam", - "entry_group": "whelk", - "entry_link": "octopus", - } - path = CatalogServiceClient.entry_link_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_entry_link_path(path) - assert expected == actual - -def test_entry_type_path(): - project = "oyster" - location = "nudibranch" - entry_type = "cuttlefish" - expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format(project=project, location=location, entry_type=entry_type, ) - actual = CatalogServiceClient.entry_type_path(project, location, entry_type) - assert expected == actual - - -def test_parse_entry_type_path(): - expected = { - "project": "mussel", - "location": "winkle", - "entry_type": "nautilus", - } - path = CatalogServiceClient.entry_type_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_entry_type_path(path) - assert expected == actual - -def test_glossary_path(): - project = "scallop" - location = "abalone" - glossary = "squid" - expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) - actual = CatalogServiceClient.glossary_path(project, location, glossary) - assert expected == actual - - -def test_parse_glossary_path(): - expected = { - "project": "clam", - "location": "whelk", - "glossary": "octopus", - } - path = CatalogServiceClient.glossary_path(**expected) - - # Check that the path construction is reversible. 
- actual = CatalogServiceClient.parse_glossary_path(path) - assert expected == actual - -def test_metadata_job_path(): - project = "oyster" - location = "nudibranch" - metadataJob = "cuttlefish" - expected = "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format(project=project, location=location, metadataJob=metadataJob, ) - actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) - assert expected == actual - - -def test_parse_metadata_job_path(): - expected = { - "project": "mussel", - "location": "winkle", - "metadataJob": "nautilus", - } - path = CatalogServiceClient.metadata_job_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_metadata_job_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CatalogServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = CatalogServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = CatalogServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = CatalogServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CatalogServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = CatalogServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = CatalogServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = CatalogServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CatalogServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CatalogServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = CatalogServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
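The round-trip property these path-helper tests assert looks like this in practice; the resource values are placeholders:

    from google.cloud.dataplex_v1 import CatalogServiceClient

    # Compose a fully qualified resource name from its components...
    path = CatalogServiceClient.glossary_path("my-project", "us-central1", "my-glossary")
    # path == "projects/my-project/locations/us-central1/glossaries/my-glossary"

    # ...and parse it back into its components.
    parsed = CatalogServiceClient.parse_glossary_path(path)
    assert parsed == {"project": "my-project", "location": "us-central1", "glossary": "my-glossary"}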
- actual = CatalogServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CatalogServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = CatalogServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
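The field-header assertions used throughout these mixin tests encode the standard GAPIC routing convention: the request's resource name is mirrored into x-goog-request-params metadata. A short sketch of the shape being asserted:

    from google.longrunning import operations_pb2

    request = operations_pb2.CancelOperationRequest(name="locations")
    # The client attaches the resource name as a routing header.
    expected = ("x-goog-request-params", "name={}".format(request.name))
    assert expected == ("x-goog-request-params", "name=locations")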
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
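For context on the from_dict variants: each mixin method also accepts a plain dict and coerces it into the corresponding protobuf request before sending. A hedged usage sketch; the resource name is a placeholder, and against a real project this issues an RPC:

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.dataplex_v1 import CatalogServiceClient

    client = CatalogServiceClient(credentials=AnonymousCredentials())
    # The dict is coerced to operations_pb2.ListOperationsRequest.
    response = client.list_operations(
        request={"name": "projects/my-project/locations/us-central1"}
    )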
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport), - (CatalogServiceAsyncClient, transports.CatalogServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py deleted file mode 100644 index 8e1bbd8a586b..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_cmek_service.py +++ /dev/null @@ -1,6432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.cmek_service import CmekServiceAsyncClient -from google.cloud.dataplex_v1.services.cmek_service import CmekServiceClient -from google.cloud.dataplex_v1.services.cmek_service import pagers -from google.cloud.dataplex_v1.services.cmek_service import transports -from google.cloud.dataplex_v1.types import cmek -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
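The endpoint mapping asserted by test__get_default_mtls_endpoint below inserts an .mtls label after the leftmost hostname component of googleapis.com endpoints and passes everything else through; a hypothetical re-implementation, for illustration only:

    def derive_mtls_endpoint(api_endpoint):
        # Hypothetical sketch: non-Google hosts and existing mTLS hosts pass through.
        if api_endpoint is None:
            return None
        if not api_endpoint.endswith(".googleapis.com") or ".mtls." in api_endpoint:
            return api_endpoint
        name, _, rest = api_endpoint.partition(".")
        return "{}.mtls.{}".format(name, rest)

    assert derive_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
    assert derive_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
    assert derive_mtls_endpoint("api.example.com") == "api.example.com"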
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CmekServiceClient._get_default_mtls_endpoint(None) is None - assert CmekServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CmekServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CmekServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CmekServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CmekServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert CmekServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CmekServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CmekServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - CmekServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CmekServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CmekServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CmekServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - CmekServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CmekServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert CmekServiceClient._get_client_cert_source(None, False) is None - assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with 
mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert CmekServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert CmekServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) -@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = CmekServiceClient._DEFAULT_UNIVERSE - default_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert CmekServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT - assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "always") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT - assert CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CmekServiceClient.DEFAULT_MTLS_ENDPOINT - assert CmekServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert CmekServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - CmekServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert CmekServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert CmekServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert CmekServiceClient._get_universe_domain(None, None) == CmekServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - CmekServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
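# A minimal standalone sketch (hypothetical helper, not the generated
# _get_api_endpoint) of the precedence the tests above pin down: an explicit
# api_endpoint override wins; otherwise the mTLS endpoint is used when it is
# forced via GOOGLE_API_USE_MTLS_ENDPOINT=always or a client certificate is
# available under "auto"; otherwise the default endpoint is templated on the
# resolved universe domain.
_DEFAULT_UNIVERSE_SKETCH = "googleapis.com"

def _resolve_endpoint_sketch(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
    if api_override is not None:
        # An explicit endpoint override always wins, regardless of mTLS settings.
        return api_override
    use_mtls = use_mtls_endpoint == "always" or (
        use_mtls_endpoint == "auto" and client_cert_source is not None
    )
    if use_mtls:
        if universe_domain != _DEFAULT_UNIVERSE_SKETCH:
            # The generated client raises MutualTLSChannelError with this message.
            raise ValueError("mTLS is not supported in any universe other than googleapis.com.")
        return "dataplex.mtls.googleapis.com"
    # Default endpoint, templated on whichever universe domain was resolved.
    return "dataplex.{}".format(universe_domain)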
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = CmekServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = CmekServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (CmekServiceClient, "grpc"), - (CmekServiceAsyncClient, "grpc_asyncio"), - (CmekServiceClient, "rest"), -]) -def test_cmek_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CmekServiceGrpcTransport, "grpc"), - (transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CmekServiceRestTransport, "rest"), -]) -def test_cmek_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CmekServiceClient, "grpc"), - (CmekServiceAsyncClient, "grpc_asyncio"), - (CmekServiceClient, "rest"), -]) -def test_cmek_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - 
assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_cmek_service_client_get_transport_class(): - transport = CmekServiceClient.get_transport_class() - available_transports = [ - transports.CmekServiceGrpcTransport, - transports.CmekServiceRestTransport, - ] - assert transport in available_transports - - transport = CmekServiceClient.get_transport_class("grpc") - assert transport == transports.CmekServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc"), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (CmekServiceClient, transports.CmekServiceRestTransport, "rest"), -]) -@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) -@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) -def test_cmek_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CmekServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CmekServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
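    # ("always" pins the transport host to DEFAULT_MTLS_ENDPOINT, e.g.
    #   GOOGLE_API_USE_MTLS_ENDPOINT=always  ->  host="dataplex.mtls.googleapis.com",
    # even though no client certificate is configured here; endpoint selection and
    # certificate configuration are independent, as the mtls_env_auto tests below show.)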
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", "true"), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", "false"), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CmekServiceClient, transports.CmekServiceRestTransport, "rest", "true"), - (CmekServiceClient, transports.CmekServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) -@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) -@mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cmek_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - CmekServiceClient, CmekServiceAsyncClient -]) -@mock.patch.object(CmekServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CmekServiceClient)) -@mock.patch.object(CmekServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CmekServiceAsyncClient)) -def test_cmek_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
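    # (Under "auto" the discoverable default certificate is the deciding factor:
    # cert present -> DEFAULT_MTLS_ENDPOINT with that cert as the source; compare the
    # preceding case, where the missing cert kept the client on DEFAULT_ENDPOINT.)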
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - CmekServiceClient, CmekServiceAsyncClient -]) -@mock.patch.object(CmekServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceClient)) -@mock.patch.object(CmekServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CmekServiceAsyncClient)) -def test_cmek_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = CmekServiceClient._DEFAULT_UNIVERSE - default_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = CmekServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc"), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (CmekServiceClient, transports.CmekServiceRestTransport, "rest"), -]) -def test_cmek_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", grpc_helpers), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CmekServiceClient, transports.CmekServiceRestTransport, "rest", None), -]) -def test_cmek_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
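    # (ClientOptions.credentials_file is forwarded to the transport verbatim as
    # credentials_file=... with credentials=None; actually reading the file is the
    # transport's job, which the create_channel test further down pins separately.)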
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_cmek_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CmekServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport, "grpc", grpc_helpers), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cmek_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
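    # (load_credentials_from_file is patched to return file_creds while google.auth.default
    # returns a distinct creds object; asserting that create_channel received
    # credentials=file_creds proves the file wins over application default credentials.)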
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - cmek.CreateEncryptionConfigRequest, - dict, -]) -def test_create_encryption_config(request_type, transport: str = 'grpc'): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cmek.CreateEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_encryption_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cmek.CreateEncryptionConfigRequest( - parent='parent_value', - encryption_config_id='encryption_config_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
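    # (Only .name is stubbed: this test never resolves the long-running operation, so
    # a full operations_pb2.Operation is unnecessary, and a plain string keeps the
    # mock usable by clients -- e.g. compute -- whose plumbing reads operation.name.)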
- client.create_encryption_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cmek.CreateEncryptionConfigRequest( - parent='parent_value', - encryption_config_id='encryption_config_id_value', - ) - -def test_create_encryption_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_encryption_config] = mock_rpc - request = {} - client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_encryption_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_encryption_config] = mock_rpc - - request = {} - await client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.CreateEncryptionConfigRequest): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
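    # (Proto3 field presence means request_type() constructs -- and serializes --
    # cleanly with every field unset; "required" is a server-side contract that this
    # mocked test never exercises.)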
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = cmek.CreateEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_encryption_config_async_from_dict(): - await test_create_encryption_config_async(request_type=dict) - -def test_create_encryption_config_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.CreateEncryptionConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_encryption_config_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.CreateEncryptionConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_encryption_config_flattened(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
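    # (The flattened kwargs are convenience parameters: the client copies each one
    # into a fresh CreateEncryptionConfigRequest before the RPC, which is why the
    # assertions below inspect args[0].parent et al. rather than the keyword values.)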
- client.create_encryption_config( - parent='parent_value', - encryption_config=cmek.EncryptionConfig(name='name_value'), - encryption_config_id='encryption_config_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].encryption_config - mock_val = cmek.EncryptionConfig(name='name_value') - assert arg == mock_val - arg = args[0].encryption_config_id - mock_val = 'encryption_config_id_value' - assert arg == mock_val - - -def test_create_encryption_config_flattened_error(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_encryption_config( - cmek.CreateEncryptionConfigRequest(), - parent='parent_value', - encryption_config=cmek.EncryptionConfig(name='name_value'), - encryption_config_id='encryption_config_id_value', - ) - -@pytest.mark.asyncio -async def test_create_encryption_config_flattened_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_encryption_config( - parent='parent_value', - encryption_config=cmek.EncryptionConfig(name='name_value'), - encryption_config_id='encryption_config_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].encryption_config - mock_val = cmek.EncryptionConfig(name='name_value') - assert arg == mock_val - arg = args[0].encryption_config_id - mock_val = 'encryption_config_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_encryption_config_flattened_error_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_encryption_config( - cmek.CreateEncryptionConfigRequest(), - parent='parent_value', - encryption_config=cmek.EncryptionConfig(name='name_value'), - encryption_config_id='encryption_config_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cmek.UpdateEncryptionConfigRequest, - dict, -]) -def test_update_encryption_config(request_type, transport: str = 'grpc'): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
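    # (Patching __call__ on type(client.transport.update_encryption_config) replaces
    # the gRPC multicallable itself, so request construction, routing headers, and
    # response handling on the client all run for real against a faked wire call.)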
- with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cmek.UpdateEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_encryption_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cmek.UpdateEncryptionConfigRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_encryption_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cmek.UpdateEncryptionConfigRequest( - ) - -def test_update_encryption_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_encryption_config] = mock_rpc - request = {} - client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_encryption_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_encryption_config] = mock_rpc - - request = {} - await client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.UpdateEncryptionConfigRequest): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = cmek.UpdateEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_encryption_config_async_from_dict(): - await test_update_encryption_config_async(request_type=dict) - -def test_update_encryption_config_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.UpdateEncryptionConfigRequest() - - request.encryption_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'encryption_config.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_encryption_config_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.UpdateEncryptionConfigRequest() - - request.encryption_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'encryption_config.name=name_value', - ) in kw['metadata'] - - -def test_update_encryption_config_flattened(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_encryption_config( - encryption_config=cmek.EncryptionConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].encryption_config - mock_val = cmek.EncryptionConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_encryption_config_flattened_error(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_encryption_config( - cmek.UpdateEncryptionConfigRequest(), - encryption_config=cmek.EncryptionConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_encryption_config_flattened_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_encryption_config( - encryption_config=cmek.EncryptionConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].encryption_config - mock_val = cmek.EncryptionConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_encryption_config_flattened_error_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_encryption_config( - cmek.UpdateEncryptionConfigRequest(), - encryption_config=cmek.EncryptionConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - cmek.DeleteEncryptionConfigRequest, - dict, -]) -def test_delete_encryption_config(request_type, transport: str = 'grpc'): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cmek.DeleteEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_encryption_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cmek.DeleteEncryptionConfigRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_encryption_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cmek.DeleteEncryptionConfigRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_encryption_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_encryption_config] = mock_rpc - request = {} - client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_encryption_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_encryption_config] = mock_rpc - - request = {} - await client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.DeleteEncryptionConfigRequest): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = cmek.DeleteEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_encryption_config_async_from_dict(): - await test_delete_encryption_config_async(request_type=dict) - -def test_delete_encryption_config_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.DeleteEncryptionConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_encryption_config_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cmek.DeleteEncryptionConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
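-    # Note: kw['metadata'] holds the routing header the client layer builds
-    # from the populated request field, so the exact pair
-    # ('x-goog-request-params', 'name=name_value') must be present.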
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_encryption_config_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_encryption_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_encryption_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_encryption_config_flattened_error():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_encryption_config(
-            cmek.DeleteEncryptionConfigRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_encryption_config_flattened_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_encryption_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_encryption_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_encryption_config_flattened_error_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_encryption_config(
-            cmek.DeleteEncryptionConfigRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    cmek.ListEncryptionConfigsRequest,
-    dict,
-])
-def test_list_encryption_configs(request_type, transport: str = 'grpc'):
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = cmek.ListEncryptionConfigsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_encryption_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cmek.ListEncryptionConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEncryptionConfigsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_encryption_configs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cmek.ListEncryptionConfigsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_encryption_configs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cmek.ListEncryptionConfigsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_encryption_configs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_encryption_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_encryption_configs] = mock_rpc - request = {} - client.list_encryption_configs(request) - - # Establish that the underlying gRPC stub method was called. 
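-        # Note: _wrapped_methods is keyed by the bound transport callable,
-        # so swapping in mock_rpc above bypasses the cached wrapper; only
-        # the mock's call count changes from here on.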
-        assert mock_rpc.call_count == 1
-
-        client.list_encryption_configs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_encryption_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = CmekServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_encryption_configs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_encryption_configs] = mock_rpc
-
-        request = {}
-        await client.list_encryption_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_encryption_configs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_encryption_configs_async(transport: str = 'grpc_asyncio', request_type=cmek.ListEncryptionConfigsRequest):
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_encryption_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = cmek.ListEncryptionConfigsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEncryptionConfigsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_encryption_configs_async_from_dict():
-    await test_list_encryption_configs_async(request_type=dict)
-
-def test_list_encryption_configs_field_headers():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = cmek.ListEncryptionConfigsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        call.return_value = cmek.ListEncryptionConfigsResponse()
-        client.list_encryption_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_encryption_configs_field_headers_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = cmek.ListEncryptionConfigsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse())
-        await client.list_encryption_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_encryption_configs_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = cmek.ListEncryptionConfigsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_encryption_configs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_encryption_configs_flattened_error():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_encryption_configs(
-            cmek.ListEncryptionConfigsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_encryption_configs_flattened_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_encryption_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
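-        # (The flattened kwargs below are folded into a
-        # ListEncryptionConfigsRequest by the client; passing both a request
-        # object and kwargs raises ValueError, per the *_flattened_error
-        # tests.)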
- response = await client.list_encryption_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_encryption_configs_flattened_error_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_encryption_configs( - cmek.ListEncryptionConfigsRequest(), - parent='parent_value', - ) - - -def test_list_encryption_configs_pager(transport_name: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - next_page_token='abc', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[], - next_page_token='def', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - ], - next_page_token='ghi', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_encryption_configs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cmek.EncryptionConfig) - for i in results) -def test_list_encryption_configs_pages(transport_name: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - # Set the response to a series of pages. 
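-        # Note: each response in side_effect below is consumed by one page
-        # fetch; the trailing RuntimeError would only surface if the pager
-        # requested more pages than the test supplies.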
- call.side_effect = ( - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - next_page_token='abc', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[], - next_page_token='def', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - ], - next_page_token='ghi', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_encryption_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_encryption_configs_async_pager(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - next_page_token='abc', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[], - next_page_token='def', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - ], - next_page_token='ghi', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_encryption_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cmek.EncryptionConfig) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_encryption_configs_async_pages(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - next_page_token='abc', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[], - next_page_token='def', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - ], - next_page_token='ghi', - ), - cmek.ListEncryptionConfigsResponse( - encryption_configs=[ - cmek.EncryptionConfig(), - cmek.EncryptionConfig(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_encryption_configs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - cmek.GetEncryptionConfigRequest, - dict, -]) -def test_get_encryption_config(request_type, transport: str = 'grpc'): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cmek.EncryptionConfig( - name='name_value', - key='key_value', - encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, - etag='etag_value', - ) - response = client.get_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = cmek.GetEncryptionConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, cmek.EncryptionConfig) - assert response.name == 'name_value' - assert response.key == 'key_value' - assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING - assert response.etag == 'etag_value' - - -def test_get_encryption_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = cmek.GetEncryptionConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_encryption_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
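-        # Note: the "foo" name above matters only for compute-style clients
-        # that poll operations by string name; here it simply keeps the
-        # mock's return value benign for this unary call.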
- client.get_encryption_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cmek.GetEncryptionConfigRequest( - name='name_value', - ) - -def test_get_encryption_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_encryption_config] = mock_rpc - request = {} - client.get_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_encryption_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_encryption_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_encryption_config] = mock_rpc - - request = {} - await client.get_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_encryption_config_async(transport: str = 'grpc_asyncio', request_type=cmek.GetEncryptionConfigRequest): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig(
-            name='name_value',
-            key='key_value',
-            encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING,
-            etag='etag_value',
-        ))
-        response = await client.get_encryption_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = cmek.GetEncryptionConfigRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, cmek.EncryptionConfig)
-    assert response.name == 'name_value'
-    assert response.key == 'key_value'
-    assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_encryption_config_async_from_dict():
-    await test_get_encryption_config_async(request_type=dict)
-
-def test_get_encryption_config_field_headers():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = cmek.GetEncryptionConfigRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_encryption_config),
-            '__call__') as call:
-        call.return_value = cmek.EncryptionConfig()
-        client.get_encryption_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_encryption_config_field_headers_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = cmek.GetEncryptionConfigRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_encryption_config),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig())
-        await client.get_encryption_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_encryption_config_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_encryption_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = cmek.EncryptionConfig()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_encryption_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_encryption_config_flattened_error():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_encryption_config(
-            cmek.GetEncryptionConfigRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_encryption_config_flattened_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_encryption_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_encryption_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_encryption_config_flattened_error_async():
-    client = CmekServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_encryption_config(
-            cmek.GetEncryptionConfigRequest(),
-            name='name_value',
-        )
-
-
-def test_create_encryption_config_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = CmekServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_encryption_config in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_encryption_config] = mock_rpc
-
-        request = {}
-        client.create_encryption_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_encryption_config_rest_required_fields(request_type=cmek.CreateEncryptionConfigRequest): - transport_class = transports.CmekServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["encryption_config_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "encryptionConfigId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_encryption_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "encryptionConfigId" in jsonified_request - assert jsonified_request["encryptionConfigId"] == request_init["encryption_config_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["encryptionConfigId"] = 'encryption_config_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_encryption_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("encryption_config_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "encryptionConfigId" in jsonified_request - assert jsonified_request["encryptionConfigId"] == 'encryption_config_id_value' - - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
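-            # Note: outside this mock, transcode() would resolve the real
-            # binding for this RPC (a POST to
-            # v1/{parent=organizations/*/locations/*}/encryptionConfigs, as
-            # the flattened test below validates); the placeholder URI keeps
-            # the focus on query-param handling.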
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_encryption_config(request)
-
-            expected_params = [
-                (
-                    "encryptionConfigId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_encryption_config_rest_unset_required_fields():
-    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_encryption_config._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("encryptionConfigId", )) & set(("parent", "encryptionConfigId", "encryptionConfig", )))
-
-
-def test_create_encryption_config_rest_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'organizations/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            encryption_config=cmek.EncryptionConfig(name='name_value'),
-            encryption_config_id='encryption_config_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_encryption_config(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/encryptionConfigs" % client.transport._host, args[1])
-
-
-def test_create_encryption_config_rest_flattened_error(transport: str = 'rest'):
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_encryption_config( - cmek.CreateEncryptionConfigRequest(), - parent='parent_value', - encryption_config=cmek.EncryptionConfig(name='name_value'), - encryption_config_id='encryption_config_id_value', - ) - - -def test_update_encryption_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_encryption_config] = mock_rpc - - request = {} - client.update_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_encryption_config_rest_required_fields(request_type=cmek.UpdateEncryptionConfigRequest): - transport_class = transports.CmekServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_encryption_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_encryption_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_encryption_config(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_encryption_config_rest_unset_required_fields():
-    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_encryption_config._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("encryptionConfig", )))
-
-
-def test_update_encryption_config_rest_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            encryption_config=cmek.EncryptionConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_encryption_config(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{encryption_config.name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1])
-
-
-def test_update_encryption_config_rest_flattened_error(transport: str = 'rest'):
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.update_encryption_config( - cmek.UpdateEncryptionConfigRequest(), - encryption_config=cmek.EncryptionConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_encryption_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_encryption_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_encryption_config] = mock_rpc - - request = {} - client.delete_encryption_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_encryption_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_encryption_config_rest_required_fields(request_type=cmek.DeleteEncryptionConfigRequest): - transport_class = transports.CmekServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_encryption_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_encryption_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_encryption_config(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_encryption_config_rest_unset_required_fields():
-    transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_encryption_config._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
-
-
-def test_delete_encryption_config_rest_flattened():
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_encryption_config(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1])
-
-
-def test_delete_encryption_config_rest_flattened_error(transport: str = 'rest'):
-    client = CmekServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.delete_encryption_config( - cmek.DeleteEncryptionConfigRequest(), - name='name_value', - ) - - -def test_list_encryption_configs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_encryption_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_encryption_configs] = mock_rpc - - request = {} - client.list_encryption_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_encryption_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_encryption_configs_rest_required_fields(request_type=cmek.ListEncryptionConfigsRequest): - transport_class = transports.CmekServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_encryption_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_encryption_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cmek.ListEncryptionConfigsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
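-            # Note: with a GET binding and no body, every populated request
-            # field is expected to travel as a query parameter; the '$alt'
-            # entry asserted below comes from the transport's JSON encoding,
-            # not from the request fields.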
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = cmek.ListEncryptionConfigsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_encryption_configs(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_encryption_configs_rest_unset_required_fields():
- transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_encryption_configs._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_encryption_configs_rest_flattened():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = cmek.ListEncryptionConfigsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'organizations/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = cmek.ListEncryptionConfigsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_encryption_configs(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/encryptionConfigs" % client.transport._host, args[1])
-
-
-def test_list_encryption_configs_rest_flattened_error(transport: str = 'rest'):
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_encryption_configs(
- cmek.ListEncryptionConfigsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_encryption_configs_rest_pager(transport: str = 'rest'):
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- cmek.ListEncryptionConfigsResponse(
- encryption_configs=[
- cmek.EncryptionConfig(),
- cmek.EncryptionConfig(),
- cmek.EncryptionConfig(),
- ],
- next_page_token='abc',
- ),
- cmek.ListEncryptionConfigsResponse(
- encryption_configs=[],
- next_page_token='def',
- ),
- cmek.ListEncryptionConfigsResponse(
- encryption_configs=[
- cmek.EncryptionConfig(),
- ],
- next_page_token='ghi',
- ),
- cmek.ListEncryptionConfigsResponse(
- encryption_configs=[
- cmek.EncryptionConfig(),
- cmek.EncryptionConfig(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(cmek.ListEncryptionConfigsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'organizations/sample1/locations/sample2'}
-
- pager = client.list_encryption_configs(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, cmek.EncryptionConfig)
- for i in results)
-
- pages = list(client.list_encryption_configs(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_encryption_config_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_encryption_config in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_encryption_config] = mock_rpc
-
- request = {}
- client.get_encryption_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_encryption_config(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_get_encryption_config_rest_required_fields(request_type=cmek.GetEncryptionConfigRequest):
- transport_class = transports.CmekServiceRestTransport
-
- request_init = {}
- request_init["name"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_encryption_config._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["name"] = 'name_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_encryption_config._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == 'name_value'
-
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = cmek.EncryptionConfig()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = cmek.EncryptionConfig.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_encryption_config(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_encryption_config_rest_unset_required_fields():
- transport = transports.CmekServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_encryption_config._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_encryption_config_rest_flattened():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = cmek.EncryptionConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cmek.EncryptionConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_encryption_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=organizations/*/locations/*/encryptionConfigs/*}" % client.transport._host, args[1]) - - -def test_get_encryption_config_rest_flattened_error(transport: str = 'rest'): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_encryption_config( - cmek.GetEncryptionConfigRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CmekServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CmekServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CmekServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CmekServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CmekServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CmekServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CmekServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CmekServiceGrpcTransport, - transports.CmekServiceGrpcAsyncIOTransport, - transports.CmekServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = CmekServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_encryption_config_empty_call_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.CreateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_encryption_config_empty_call_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.UpdateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_encryption_config_empty_call_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.DeleteEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_encryption_configs_empty_call_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - call.return_value = cmek.ListEncryptionConfigsResponse() - client.list_encryption_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.ListEncryptionConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_encryption_config_empty_call_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_encryption_config), - '__call__') as call: - call.return_value = cmek.EncryptionConfig() - client.get_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.GetEncryptionConfigRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = CmekServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_encryption_config_empty_call_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.CreateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_encryption_config_empty_call_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.UpdateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_encryption_config_empty_call_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.DeleteEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_encryption_configs_empty_call_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.ListEncryptionConfigsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_encryption_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.ListEncryptionConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_encryption_config_empty_call_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_encryption_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cmek.EncryptionConfig( - name='name_value', - key='key_value', - encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, - etag='etag_value', - )) - await client.get_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.GetEncryptionConfigRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = CmekServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_encryption_config_rest_bad_request(request_type=cmek.CreateEncryptionConfigRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_encryption_config(request) - - -@pytest.mark.parametrize("request_type", [ - cmek.CreateEncryptionConfigRequest, - dict, -]) -def test_create_encryption_config_rest_call_success(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request_init["encryption_config"] = {'name': 'name_value', 'key': 'key_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'encryption_state': 1, 'etag': 'etag_value', 'failure_details': {'error_code': 1, 'error_message': 'error_message_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cmek.CreateEncryptionConfigRequest.meta.fields["encryption_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["encryption_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["encryption_config"][field])): - del request_init["encryption_config"][field][i][subfield] - else: - del request_init["encryption_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_encryption_config(request) - - # Establish that the response is the type that we expect. 
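- # (For long-running methods the generated check stops at re-serializing the
- # Operation below; json_return_value is computed but not asserted against.)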
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_encryption_config_rest_interceptors(null_interceptor): - transport = transports.CmekServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(), - ) - client = CmekServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_create_encryption_config") as post, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_create_encryption_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "pre_create_encryption_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = cmek.CreateEncryptionConfigRequest.pb(cmek.CreateEncryptionConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = cmek.CreateEncryptionConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_encryption_config_rest_bad_request(request_type=cmek.UpdateEncryptionConfigRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_encryption_config(request) - - -@pytest.mark.parametrize("request_type", [ - cmek.UpdateEncryptionConfigRequest, - dict, -]) -def test_update_encryption_config_rest_call_success(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'encryption_config': {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'}} - request_init["encryption_config"] = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3', 'key': 'key_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'encryption_state': 1, 'etag': 'etag_value', 'failure_details': {'error_code': 1, 'error_message': 'error_message_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cmek.UpdateEncryptionConfigRequest.meta.fields["encryption_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["encryption_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["encryption_config"][field])): - del request_init["encryption_config"][field][i][subfield] - else: - del request_init["encryption_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_encryption_config(request) - - # Establish that the response is the type that we expect. 
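- # (Same pattern as the create test above: the Operation response is only
- # re-serialized, with no field-level assertions afterwards.)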
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_encryption_config_rest_interceptors(null_interceptor): - transport = transports.CmekServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(), - ) - client = CmekServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_update_encryption_config") as post, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_update_encryption_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "pre_update_encryption_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = cmek.UpdateEncryptionConfigRequest.pb(cmek.UpdateEncryptionConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = cmek.UpdateEncryptionConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_encryption_config_rest_bad_request(request_type=cmek.DeleteEncryptionConfigRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_encryption_config(request) - - -@pytest.mark.parametrize("request_type", [ - cmek.DeleteEncryptionConfigRequest, - dict, -]) -def test_delete_encryption_config_rest_call_success(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_encryption_config(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_encryption_config_rest_interceptors(null_interceptor): - transport = transports.CmekServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(), - ) - client = CmekServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_delete_encryption_config") as post, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_delete_encryption_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "pre_delete_encryption_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = cmek.DeleteEncryptionConfigRequest.pb(cmek.DeleteEncryptionConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = cmek.DeleteEncryptionConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_encryption_configs_rest_bad_request(request_type=cmek.ListEncryptionConfigsRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_encryption_configs(request) - - -@pytest.mark.parametrize("request_type", [ - cmek.ListEncryptionConfigsRequest, - dict, -]) -def test_list_encryption_configs_rest_call_success(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cmek.ListEncryptionConfigsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cmek.ListEncryptionConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_encryption_configs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEncryptionConfigsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_encryption_configs_rest_interceptors(null_interceptor): - transport = transports.CmekServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(), - ) - client = CmekServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_list_encryption_configs") as post, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_list_encryption_configs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "pre_list_encryption_configs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = cmek.ListEncryptionConfigsRequest.pb(cmek.ListEncryptionConfigsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cmek.ListEncryptionConfigsResponse.to_json(cmek.ListEncryptionConfigsResponse()) - req.return_value.content = return_value - - request = cmek.ListEncryptionConfigsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cmek.ListEncryptionConfigsResponse() - post_with_metadata.return_value = cmek.ListEncryptionConfigsResponse(), metadata - - client.list_encryption_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_encryption_config_rest_bad_request(request_type=cmek.GetEncryptionConfigRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_encryption_config(request) - - -@pytest.mark.parametrize("request_type", [ - cmek.GetEncryptionConfigRequest, - dict, -]) -def test_get_encryption_config_rest_call_success(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/encryptionConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cmek.EncryptionConfig( - name='name_value', - key='key_value', - encryption_state=cmek.EncryptionConfig.EncryptionState.ENCRYPTING, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cmek.EncryptionConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_encryption_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cmek.EncryptionConfig) - assert response.name == 'name_value' - assert response.key == 'key_value' - assert response.encryption_state == cmek.EncryptionConfig.EncryptionState.ENCRYPTING - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_encryption_config_rest_interceptors(null_interceptor): - transport = transports.CmekServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CmekServiceRestInterceptor(), - ) - client = CmekServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_get_encryption_config") as post, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "post_get_encryption_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CmekServiceRestInterceptor, "pre_get_encryption_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = cmek.GetEncryptionConfigRequest.pb(cmek.GetEncryptionConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cmek.EncryptionConfig.to_json(cmek.EncryptionConfig()) - req.return_value.content = return_value - - request = cmek.GetEncryptionConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cmek.EncryptionConfig() - post_with_metadata.return_value = cmek.EncryptionConfig(), metadata - - client.get_encryption_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_encryption_config_empty_call_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_encryption_config), - '__call__') as call: - client.create_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.CreateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_encryption_config_empty_call_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_encryption_config), - '__call__') as call: - client.update_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.UpdateEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_encryption_config_empty_call_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_encryption_config), - '__call__') as call: - client.delete_encryption_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = cmek.DeleteEncryptionConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_encryption_configs_empty_call_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_encryption_configs), - '__call__') as call: - client.list_encryption_configs(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = cmek.ListEncryptionConfigsRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_encryption_config_empty_call_rest():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.get_encryption_config),
- '__call__') as call:
- client.get_encryption_config(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = cmek.GetEncryptionConfigRequest()
-
- assert args[0] == request_msg
-
-
-def test_cmek_service_rest_lro_client():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert isinstance(
- client.transport,
- transports.CmekServiceGrpcTransport,
- )
-
-def test_cmek_service_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- transport = transports.CmekServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json"
- )
-
-
-def test_cmek_service_base_transport():
- # Instantiate the base transport.
- with mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport.__init__') as Transport:
- Transport.return_value = None
- transport = transports.CmekServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = ( - 'create_encryption_config', - 'update_encryption_config', - 'delete_encryption_config', - 'list_encryption_configs', - 'get_encryption_config', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cmek_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CmekServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_cmek_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.cmek_service.transports.CmekServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CmekServiceTransport() - adc.assert_called_once() - - -def test_cmek_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CmekServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CmekServiceGrpcTransport, - transports.CmekServiceGrpcAsyncIOTransport, - ], -) -def test_cmek_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CmekServiceGrpcTransport, - transports.CmekServiceGrpcAsyncIOTransport, - transports.CmekServiceRestTransport, - ], -) -def test_cmek_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CmekServiceGrpcTransport, grpc_helpers), - (transports.CmekServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cmek_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport]) -def test_cmek_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_cmek_service_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.CmekServiceRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_cmek_service_host_no_port(transport_name):
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataplex.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataplex.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_cmek_service_host_with_port(transport_name):
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataplex.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataplex.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_cmek_service_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = CmekServiceClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = CmekServiceClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.create_encryption_config._session
- session2 = client2.transport.create_encryption_config._session
- assert session1 != session2
- session1 = client1.transport.update_encryption_config._session
- session2 = client2.transport.update_encryption_config._session
- assert session1 != session2
- session1 = client1.transport.delete_encryption_config._session
- session2 = client2.transport.delete_encryption_config._session
- assert session1 != session2
- session1 = client1.transport.list_encryption_configs._session
- session2 = client2.transport.list_encryption_configs._session
- assert session1 != session2
- session1 = client1.transport.get_encryption_config._session
- session2 = client2.transport.get_encryption_config._session
- assert session1 != session2
-def test_cmek_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.CmekServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_cmek_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.CmekServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport])
-def test_cmek_service_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.CmekServiceGrpcTransport, transports.CmekServiceGrpcAsyncIOTransport])
-def test_cmek_service_transport_channel_mtls_with_adc(
- transport_class
-):
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- mock_cred = mock.Mock()
-
- with pytest.warns(DeprecationWarning):
- transport = transport_class(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=None,
- )
-
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_cmek_service_grpc_lro_client():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_cmek_service_grpc_lro_async_client():
- client = CmekServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsAsyncClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_encryption_config_path():
- organization = "squid"
- location = "clam"
- encryption_config = "whelk"
- expected = "organizations/{organization}/locations/{location}/encryptionConfigs/{encryption_config}".format(organization=organization, location=location, encryption_config=encryption_config, )
- actual = CmekServiceClient.encryption_config_path(organization, location, encryption_config)
- assert expected == actual
-
-
-def test_parse_encryption_config_path():
- expected = {
- "organization": "octopus",
- "location": "oyster",
- "encryption_config": "nudibranch",
- }
- path = CmekServiceClient.encryption_config_path(**expected)
-
- # Check that the path construction is reversible.
- actual = CmekServiceClient.parse_encryption_config_path(path)
- assert expected == actual
-
-def test_organization_location_path():
- organization = "cuttlefish"
- location = "mussel"
- expected = "organizations/{organization}/locations/{location}".format(organization=organization, location=location, )
- actual = CmekServiceClient.organization_location_path(organization, location)
- assert expected == actual
-
-
-def test_parse_organization_location_path():
- expected = {
- "organization": "winkle",
- "location": "nautilus",
- }
- path = CmekServiceClient.organization_location_path(**expected)
-
- # Check that the path construction is reversible.
- actual = CmekServiceClient.parse_organization_location_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CmekServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = CmekServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CmekServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = CmekServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = CmekServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CmekServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CmekServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = CmekServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CmekServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = CmekServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = CmekServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CmekServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CmekServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = CmekServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = CmekServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CmekServiceTransport, '_prep_wrapped_messages') as prep: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CmekServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = CmekServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = locations_pb2.Location()
-
- client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
- client = CmekServiceAsyncClient(
- credentials=async_anonymous_credentials()
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = locations_pb2.GetLocationRequest()
- request.name = "locations/abc"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- await client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
- client = CmekServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = locations_pb2.Location()
-
- response = client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
- client = CmekServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = CmekServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = CmekServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (CmekServiceClient, transports.CmekServiceGrpcTransport), - (CmekServiceAsyncClient, transports.CmekServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py deleted file mode 100644 index 10e4382404e0..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_content_service.py +++ /dev/null @@ -1,8115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.content_service import ContentServiceAsyncClient -from google.cloud.dataplex_v1.services.content_service import ContentServiceClient -from google.cloud.dataplex_v1.services.content_service import pagers -from google.cloud.dataplex_v1.services.content_service import transports -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import content -from google.cloud.dataplex_v1.types import content as gcd_content -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client):
-    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-# If default endpoint template is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint template so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint_template(client):
-    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
-
-
-def test__get_default_mtls_endpoint():
-    api_endpoint = "example.googleapis.com"
-    api_mtls_endpoint = "example.mtls.googleapis.com"
-    sandbox_endpoint = "example.sandbox.googleapis.com"
-    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
-    non_googleapi = "api.example.com"
-
-    assert ContentServiceClient._get_default_mtls_endpoint(None) is None
-    assert ContentServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
-    assert ContentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
-    assert ContentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
-    assert ContentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
-    assert ContentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-def test__read_environment_variables():
-    assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        assert ContentServiceClient._read_environment_variables() == (True, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
-        assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            ContentServiceClient._read_environment_variables()
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        assert ContentServiceClient._read_environment_variables() == (False, "never", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        assert ContentServiceClient._read_environment_variables() == (False, "always", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
-        assert ContentServiceClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            ContentServiceClient._read_environment_variables()
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
-        assert ContentServiceClient._read_environment_variables() == (False, "auto", "foo.com")
-
-def test__get_client_cert_source():
-    mock_provided_cert_source = mock.Mock()
-    mock_default_cert_source = mock.Mock()
-
-    assert ContentServiceClient._get_client_cert_source(None, False) is None
-    assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
-    assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
-
-    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
-            assert ContentServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
-            assert ContentServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
-
-@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient))
-@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient))
-def test__get_api_endpoint():
-    api_override = "foo.com"
-    mock_client_cert_source = mock.Mock()
-    default_universe = ContentServiceClient._DEFAULT_UNIVERSE
-    default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    assert ContentServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
-    assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
-    assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ContentServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert ContentServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
-    assert ContentServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
-    with pytest.raises(MutualTLSChannelError) as excinfo:
-        ContentServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
-    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
-    client_universe_domain = "foo.com"
-    universe_domain_env = "bar.com"
-
-    assert ContentServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
-    assert ContentServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
-    assert ContentServiceClient._get_universe_domain(None, None) == ContentServiceClient._DEFAULT_UNIVERSE
-
-    with pytest.raises(ValueError) as excinfo:
-        ContentServiceClient._get_universe_domain("", None)
-    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
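[Editorial note, not part of the patch: the helper tests above fix a precedence order for universe-domain and endpoint resolution. The sketch below restates that order outside the test harness; every name in it (resolve_universe_domain, resolve_endpoint, the endpoint constants) is invented for this note and is not the generated client's actual code.]

_DEFAULT_UNIVERSE = "googleapis.com"
_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}"
_MTLS_ENDPOINT = "dataplex.mtls.googleapis.com"

def resolve_universe_domain(client_setting, env_setting):
    # Client option beats GOOGLE_CLOUD_UNIVERSE_DOMAIN, which beats the default.
    if client_setting == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    return client_setting or env_setting or _DEFAULT_UNIVERSE

def resolve_endpoint(api_override, client_cert_source, universe_domain, use_mtls_env):
    if api_override is not None:
        return api_override  # an explicit api_endpoint always wins
    if use_mtls_env == "always" or (use_mtls_env == "auto" and client_cert_source):
        if universe_domain != _DEFAULT_UNIVERSE:
            # the generated client raises MutualTLSChannelError here
            raise RuntimeError("mTLS is not supported in any universe other than googleapis.com.")
        return _MTLS_ENDPOINT
    return _ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_endpoint(None, None, "bar.com", "never") == "dataplex.bar.com"

[The only hard failure modes these tests pin down are the empty universe string and requesting mTLS outside googleapis.com.]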
-
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
-    (401, CRED_INFO_JSON, True),
-    (403, CRED_INFO_JSON, True),
-    (404, CRED_INFO_JSON, True),
-    (500, CRED_INFO_JSON, False),
-    (401, None, False),
-    (403, None, False),
-    (404, None, False),
-    (500, None, False)
-])
-def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
-    cred = mock.Mock(["get_cred_info"])
-    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
-    client = ContentServiceClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    if show_cred_info:
-        assert error.details == ["foo", CRED_INFO_STRING]
-    else:
-        assert error.details == ["foo"]
-
-@pytest.mark.parametrize("error_code", [401,403,404,500])
-def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
-    cred = mock.Mock([])
-    assert not hasattr(cred, "get_cred_info")
-    client = ContentServiceClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=[])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    assert error.details == []
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (ContentServiceClient, "grpc"),
-    (ContentServiceAsyncClient, "grpc_asyncio"),
-    (ContentServiceClient, "rest"),
-])
-def test_content_service_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else
-        'https://dataplex.googleapis.com'
-    )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.ContentServiceGrpcTransport, "grpc"),
-    (transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"),
-    (transports.ContentServiceRestTransport, "rest"),
-])
-def test_content_service_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (ContentServiceClient, "grpc"),
-    (ContentServiceAsyncClient, "grpc_asyncio"),
-    (ContentServiceClient, "rest"),
-])
-def test_content_service_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-    assert client.transport._host == (
-        'dataplex.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else
-        'https://dataplex.googleapis.com'
-    )
-
-
-def test_content_service_client_get_transport_class():
-    transport = ContentServiceClient.get_transport_class()
-    available_transports = [
-        transports.ContentServiceGrpcTransport,
-        transports.ContentServiceRestTransport,
-    ]
-    assert transport in available_transports
-
-    transport = ContentServiceClient.get_transport_class("grpc")
-    assert transport == transports.ContentServiceGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"),
-    (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"),
-    (ContentServiceClient, transports.ContentServiceRestTransport, "rest"),
-])
-@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient))
-@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient))
-def test_content_service_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(ContentServiceClient, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
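[Editorial note, not part of the patch: before the "always" hunk below, a compact restatement of the endpoint matrix this deleted test walks through may help. `expected_host` is a hypothetical helper written for this note, not library code.]

def expected_host(use_mtls_env, cert_available, default_host, mtls_host):
    # Mirrors the cases asserted around this point: "never" pins the regular
    # endpoint, "always" pins the mTLS endpoint, and "auto" switches on
    # whether a client certificate is available.
    if use_mtls_env == "never":
        return default_host
    if use_mtls_env == "always":
        return mtls_host
    if use_mtls_env == "auto":
        return mtls_host if cert_available else default_host
    # the real client raises MutualTLSChannelError for unsupported values
    raise ValueError("GOOGLE_API_USE_MTLS_ENDPOINT must be `never`, `auto` or `always`")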
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_endpoint is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "true"),
-    (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "false"),
-    (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (ContentServiceClient, transports.ContentServiceRestTransport, "rest", "true"),
-    (ContentServiceClient, transports.ContentServiceRestTransport, "rest", "false"),
-])
-@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient))
-@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_content_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(client_options=options, transport=transport_name)
-
-            if use_client_cert_env == "false":
-                expected_client_cert_source = None
-                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-            else:
-                expected_client_cert_source = client_cert_source_callback
-                expected_host = client.DEFAULT_MTLS_ENDPOINT
-
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=expected_host,
-                scopes=None,
-                client_cert_source_for_mtls=expected_client_cert_source,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case ADC client cert is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
-                    if use_client_cert_env == "false":
-                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-                        expected_client_cert_source = None
-                    else:
-                        expected_host = client.DEFAULT_MTLS_ENDPOINT
-                        expected_client_cert_source = client_cert_source_callback
-
-                    patched.return_value = None
-                    client = client_class(transport=transport_name)
-                    patched.assert_called_once_with(
-                        credentials=None,
-                        credentials_file=None,
-                        host=expected_host,
-                        scopes=None,
-                        client_cert_source_for_mtls=expected_client_cert_source,
-                        quota_project_id=None,
-                        client_info=transports.base.DEFAULT_CLIENT_INFO,
-                        always_use_jwt_access=True,
-                        api_audience=None,
-                    )
-
-    # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ContentServiceClient, ContentServiceAsyncClient -]) -@mock.patch.object(ContentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ContentServiceAsyncClient)) -def test_content_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ContentServiceClient, ContentServiceAsyncClient -]) -@mock.patch.object(ContentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceClient)) -@mock.patch.object(ContentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ContentServiceAsyncClient)) -def test_content_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ContentServiceClient._DEFAULT_UNIVERSE - default_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ContentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ContentServiceClient, transports.ContentServiceRestTransport, "rest"), -]) -def test_content_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ContentServiceClient, transports.ContentServiceRestTransport, "rest", None), -]) -def test_content_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_content_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ContentServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", grpc_helpers), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_content_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
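[Editorial note, not part of the patch: the next hunk asserts the channel contract for file-based credentials, that grpc_helpers.create_channel receives the credentials loaded from the file rather than application default credentials. The constants below are restated from that expected call, collected here for reference only.]

DEFAULT_HOST = "dataplex.googleapis.com"
DEFAULT_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
CHANNEL_OPTIONS = [
    ("grpc.max_send_message_length", -1),   # -1 disables gRPC's size limit
    ("grpc.max_receive_message_length", -1),
]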
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.CreateContentRequest, - dict, -]) -def test_create_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_content.CreateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_create_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_content.CreateContentRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_content.CreateContentRequest( - parent='parent_value', - ) - -def test_create_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_content] = mock_rpc - request = {} - client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_content] = mock_rpc - - request = {} - await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.CreateContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - response = await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_content.CreateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_content_async_from_dict(): - await test_create_content_async(request_type=dict) - -def test_create_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.CreateContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = analyze.Content() - client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.CreateContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - await client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_content( - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - - -def test_create_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_content( - gcd_content.CreateContentRequest(), - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_content( - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_content( - gcd_content.CreateContentRequest(), - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.UpdateContentRequest, - dict, -]) -def test_update_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_content.UpdateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_update_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_content.UpdateContentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_content.UpdateContentRequest( - ) - -def test_update_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_content] = mock_rpc - request = {} - client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_content] = mock_rpc - - request = {} - await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_content_async(transport: str = 'grpc_asyncio', request_type=gcd_content.UpdateContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - response = await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_content.UpdateContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_content_async_from_dict(): - await test_update_content_async(request_type=dict) - -def test_update_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.UpdateContentRequest() - - request.content.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = analyze.Content() - client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'content.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_content.UpdateContentRequest() - - request.content.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - await client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'content.name=name_value', - ) in kw['metadata'] - - -def test_update_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_content( - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_content( - gcd_content.UpdateContentRequest(), - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_content( - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].content - mock_val = analyze.Content(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
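# [Editor's note] Flattened keyword arguments are sugar for building the
# request message, so combining them with an explicit request object would
# be ambiguous; the client rejects the mix before any RPC is attempted.
# Roughly (a sketch of the generated guard, not quoted verbatim):
#
#     has_flattened_params = any([content, update_mask])
#     if request is not None and has_flattened_params:
#         raise ValueError('If the `request` argument is set, then none of '
#                          'the individual field arguments should be set.')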
- with pytest.raises(ValueError): - await client.update_content( - gcd_content.UpdateContentRequest(), - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - content.DeleteContentRequest, - dict, -]) -def test_delete_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.DeleteContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.DeleteContentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.DeleteContentRequest( - name='name_value', - ) - -def test_delete_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc - request = {} - client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_content] = mock_rpc - - request = {} - await client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_content_async(transport: str = 'grpc_asyncio', request_type=content.DeleteContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = content.DeleteContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_content_async_from_dict(): - await test_delete_content_async(request_type=dict) - -def test_delete_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.DeleteContentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value = None - client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.DeleteContentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_content( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_content( - content.DeleteContentRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_content( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_content( - content.DeleteContentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - content.GetContentRequest, - dict, -]) -def test_get_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - response = client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.GetContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -def test_get_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.GetContentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.GetContentRequest( - name='name_value', - ) - -def test_get_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_content] = mock_rpc - request = {} - client.get_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_content] = mock_rpc - - request = {} - await client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_content_async(transport: str = 'grpc_asyncio', request_type=content.GetContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - response = await client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = content.GetContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_content_async_from_dict(): - await test_get_content_async(request_type=dict) - -def test_get_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.GetContentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value = analyze.Content() - client.get_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.GetContentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - await client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_content_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_content( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_content_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_content( - content.GetContentRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_content_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Content() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_content( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_content_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_content( - content.GetContentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - -def test_get_iam_policy_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_get_iam_policy_flattened_error(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
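# [Editor's note] Assigning call.return_value.name = "foo" above is a
# guard, not part of the assertion: some generated surfaces (the compute
# clients, per the inline comment) read response.name as a string while
# handling operations, and a bare Mock attribute would not satisfy that.
# The assignment happens after construction because mock.Mock(name=...)
# configures the mock's repr name rather than a plain attribute:
#
#     call.return_value = mock.Mock()
#     call.return_value.name = "foo"    # plain string attribute, set post-hoc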
- client.set_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - -def test_set_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc - - request = {} - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
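# [Editor's note] FakeUnaryUnaryCall, used a few lines up, exists because
# an async gRPC stub returns an awaitable call object rather than the
# response itself; google.api_core.grpc_helpers_async ships the fake so
# that awaiting the mocked call yields the wrapped message. Minimal sketch:
#
#     fake = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(version=774))
#     policy = await fake    # -> the Policy message that was wrapped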
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
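# [Editor's note] The idiom below patches __call__ on the *type* of the
# transport's stub because Python looks special methods up on the class:
# obj(...) runs type(obj).__call__(obj, ...), so patching the instance
# would never intercept the call. The stub itself is the transport's
# cached gRPC multicallable, so the patch sits beneath the retry and
# metadata layers. Self-contained illustration of the lookup rule:
#
#     class C:
#         def __call__(self):
#             return 'class'
#     c = C()
#     c.__call__ = lambda: 'instance'
#     assert c() == 'class'    # the instance attribute is ignored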
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - -def test_test_iam_permissions_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - content.ListContentRequest, - dict, -]) -def test_list_content(request_type, transport: str = 'grpc'): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = content.ListContentResponse( - next_page_token='next_page_token_value', - ) - response = client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = content.ListContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListContentPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_content_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
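# [Editor's note] AIP-4235 covers request fields annotated as
# auto-populated UUID4 request IDs: when such a field is left empty, the
# client fills it with str(uuid.uuid4()) before sending, so retried
# requests stay idempotent server-side. ListContentRequest declares no
# such field, which is why this test is only a coverage failsafe: the
# echoed request must come back exactly as constructed. Sketch of the
# client-side rule, assuming a hypothetical auto-populated `request_id`:
#
#     import uuid
#     if not request.request_id:    # hypothetical field, for illustration
#         request.request_id = str(uuid.uuid4())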
- client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = content.ListContentRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_content(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == content.ListContentRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_content_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_content] = mock_rpc - request = {} - client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_content_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_content in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_content] = mock_rpc - - request = {} - await client.list_content(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_content_async(transport: str = 'grpc_asyncio', request_type=content.ListContentRequest): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = content.ListContentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListContentAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_content_async_from_dict(): - await test_list_content_async(request_type=dict) - -def test_list_content_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.ListContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value = content.ListContentResponse() - client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_content_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = content.ListContentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse()) - await client.list_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_content_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = content.ListContentResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_content(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_content_flattened_error():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_content(
- content.ListContentRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_content_flattened_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_content(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_content_flattened_error_async():
- client = ContentServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_content(
- content.ListContentRequest(),
- parent='parent_value',
- )
-
-
-def test_list_content_pager(transport_name: str = "grpc"):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_content),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_content(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Content) - for i in results) -def test_list_content_pages(transport_name: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - pages = list(client.list_content(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_content_async_pager(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_content(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Content) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_content_async_pages(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - analyze.Content(), - ], - next_page_token='abc', - ), - content.ListContentResponse( - content=[], - next_page_token='def', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - ], - next_page_token='ghi', - ), - content.ListContentResponse( - content=[ - analyze.Content(), - analyze.Content(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_content(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_content] = mock_rpc - - request = {} - client.create_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_content_rest_required_fields(request_type=gcd_content.CreateContentRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_content._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = analyze.Content()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.create_content(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_content_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_content._get_unset_required_fields({})
- assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "content", )))
-
-
-def test_create_content_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = analyze.Content()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- content=analyze.Content(name='name_value'),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.create_content(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/contentitems" % client.transport._host, args[1])
-
-
-def test_create_content_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.create_content( - gcd_content.CreateContentRequest(), - parent='parent_value', - content=analyze.Content(name='name_value'), - ) - - -def test_update_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_content] = mock_rpc - - request = {} - client.update_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_content_rest_required_fields(request_type=gcd_content.UpdateContentRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_content._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = analyze.Content() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_content(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_content_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_content._get_unset_required_fields({})
- assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "content", )))
-
-
-def test_update_content_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = analyze.Content()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'content': {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}}
-
- # get truthy value for each flattened field
- mock_args = dict(
- content=analyze.Content(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.update_content(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{content.name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1])
-
-
-def test_update_content_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.update_content( - gcd_content.UpdateContentRequest(), - content=analyze.Content(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_content_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_content in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_content] = mock_rpc - - request = {} - client.delete_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_content_rest_required_fields(request_type=content.DeleteContentRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.delete_content(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_content_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_content._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_content_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.delete_content(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1])
-
-
-def test_delete_content_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_content(
- content.DeleteContentRequest(),
- name='name_value',
- )
-
-
-def test_get_content_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_content in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_content] = mock_rpc - - request = {} - client.get_content(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_content(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_content_rest_required_fields(request_type=content.GetContentRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_content._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_content._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = analyze.Content() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_content(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_content_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_content._get_unset_required_fields({})
- assert set(unset_fields) == (set(("view", )) & set(("name", )))
-
-
-def test_get_content_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = analyze.Content()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = analyze.Content.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_content(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/contentitems/**}" % client.transport._host, args[1])
-
-
-def test_get_content_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_content( - content.GetContentRequest(), - name='name_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("options", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_iam_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_iam_policy_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_iam_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(("options", )) & set(("resource", )))
-
-
-def test_get_iam_policy_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- resource='resource_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_iam_policy(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{resource=projects/*/locations/*/lakes/*/contentitems/**}:getIamPolicy" % client.transport._host, args[1])
-
-
-def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.set_iam_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_set_iam_policy_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.set_iam_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("resource", "policy", )))
-
-
-def test_test_iam_permissions_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.test_iam_permissions in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc
-
- request = {}
- client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.ContentServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.test_iam_permissions(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_test_iam_permissions_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("resource", "permissions", )))
-
-
-def test_list_content_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_content in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_content] = mock_rpc
-
- request = {}
- client.list_content(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_content(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_list_content_rest_required_fields(request_type=content.ListContentRequest):
- transport_class = transports.ContentServiceRestTransport
-
- request_init = {}
- request_init["parent"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_content._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["parent"] = 'parent_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_content._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "parent" in jsonified_request
- assert jsonified_request["parent"] == 'parent_value'
-
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = content.ListContentResponse()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = content.ListContentResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_content(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_content_rest_unset_required_fields():
- transport = transports.ContentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_content._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_content_rest_flattened():
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = content.ListContentResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = content.ListContentResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_content(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/contentitems" % client.transport._host, args[1])
-
-
-def test_list_content_rest_flattened_error(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_content(
- content.ListContentRequest(),
- parent='parent_value',
- )
-
-
-def test_list_content_rest_pager(transport: str = 'rest'):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- content.ListContentResponse(
- content=[
- analyze.Content(),
- analyze.Content(),
- analyze.Content(),
- ],
- next_page_token='abc',
- ),
- content.ListContentResponse(
- content=[],
- next_page_token='def',
- ),
- content.ListContentResponse(
- content=[
- analyze.Content(),
- ],
- next_page_token='ghi',
- ),
- content.ListContentResponse(
- content=[
- analyze.Content(),
- analyze.Content(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(content.ListContentResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
- pager = client.list_content(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, analyze.Content)
- for i in results)
-
- pages = list(client.list_content(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.ContentServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ContentServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.ContentServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ContentServiceClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
-
- # It is an error to provide an api_key and a transport instance.
- transport = transports.ContentServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ContentServiceClient(
- client_options=options,
- transport=transport,
- )
-
- # It is an error to provide an api_key and a credential.
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ContentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ContentServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ContentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ContentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, - transports.ContentServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ContentServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - call.return_value = analyze.Content() - client.create_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.CreateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - call.return_value = analyze.Content() - client.update_content(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.UpdateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - call.return_value = None - client.delete_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.DeleteContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - call.return_value = analyze.Content() - client.get_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.GetContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
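- # (Patching the transport's bound stub keeps the test hermetic: no RPC
- # leaves the process, and the canned TestIamPermissionsResponse below
- # stands in for the server reply.)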
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_content_empty_call_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - call.return_value = content.ListContentResponse() - client.list_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.ListContentRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ContentServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.create_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.CreateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.update_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.UpdateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.DeleteContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - )) - await client.get_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.GetContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_content_empty_call_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(content.ListContentResponse( - next_page_token='next_page_token_value', - )) - await client.list_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.ListContentRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = ContentServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_content_rest_bad_request(request_type=gcd_content.CreateContentRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
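- # (google.api_core maps HTTP status codes onto exception classes, e.g.
- # 400 -> BadRequest, 404 -> NotFound, 500 -> InternalServerError, so a
- # mocked 400 from Session.request is enough to drive the error path.)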
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_content(request) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.CreateContentRequest, - dict, -]) -def test_create_content_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request_init["content"] = {'name': 'name_value', 'uid': 'uid_value', 'path': 'path_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'data_text': 'data_text_value', 'sql_script': {'engine': 2}, 'notebook': {'kernel_type': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcd_content.CreateContentRequest.meta.fields["content"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["content"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["content"][field])): - del request_init["content"][field][i][subfield] - else: - del request_init["content"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analyze.Content.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_content_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_create_content") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_create_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_create_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcd_content.CreateContentRequest.pb(gcd_content.CreateContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analyze.Content.to_json(analyze.Content()) - req.return_value.content = return_value - - request = gcd_content.CreateContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analyze.Content() - post_with_metadata.return_value = analyze.Content(), metadata - - client.create_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_content_rest_bad_request(request_type=gcd_content.UpdateContentRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'content': {'name': 
'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_content(request) - - -@pytest.mark.parametrize("request_type", [ - gcd_content.UpdateContentRequest, - dict, -]) -def test_update_content_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'content': {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'}} - request_init["content"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4', 'uid': 'uid_value', 'path': 'path_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'data_text': 'data_text_value', 'sql_script': {'engine': 2}, 'notebook': {'kernel_type': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcd_content.UpdateContentRequest.meta.fields["content"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
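- # Handles both proto-plus messages (fields under .meta.fields) and raw
- # protobuf messages (fields under .DESCRIPTOR.fields); the branch below
- # sniffs for DESCRIPTOR to tell the two apart.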
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["content"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["content"][field])): - del request_init["content"][field][i][subfield] - else: - del request_init["content"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analyze.Content.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_content(request) - - # Establish that the response is the type that we expect. 
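- # (The mocked JSON body has been parsed back into a proto-plus
- # analyze.Content, so the scalar fields set above should round-trip
- # unchanged.)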
- assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_content_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_update_content") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_update_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_update_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcd_content.UpdateContentRequest.pb(gcd_content.UpdateContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analyze.Content.to_json(analyze.Content()) - req.return_value.content = return_value - - request = gcd_content.UpdateContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analyze.Content() - post_with_metadata.return_value = analyze.Content(), metadata - - client.update_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_content_rest_bad_request(request_type=content.DeleteContentRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_content(request) - - -@pytest.mark.parametrize("request_type", [ - content.DeleteContentRequest, - dict, -]) -def test_delete_content_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
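- # (DeleteContent maps to google.protobuf.Empty on the wire, which the
- # client surfaces as None, so an empty JSON body is a valid success
- # reply here.)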
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_content(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_content_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_delete_content") as pre: - pre.assert_not_called() - pb_message = content.DeleteContentRequest.pb(content.DeleteContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = content.DeleteContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_content_rest_bad_request(request_type=content.GetContentRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_content(request) - - -@pytest.mark.parametrize("request_type", [ - content.GetContentRequest, - dict, -]) -def test_get_content_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
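- # (Only the scalar fields asserted further down need populating;
- # message fields left unset simply keep their defaults after the JSON
- # round trip.)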
- return_value = analyze.Content( - name='name_value', - uid='uid_value', - path='path_value', - description='description_value', - data_text='data_text_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analyze.Content.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Content) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.path == 'path_value' - assert response.description == 'description_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_content_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_content") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_get_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = content.GetContentRequest.pb(content.GetContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analyze.Content.to_json(analyze.Content()) - req.return_value.content = return_value - - request = content.GetContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analyze.Content() - post_with_metadata.return_value = analyze.Content(), metadata - - client.get_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
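- # (The IAM mix-in methods work with the raw iam_policy_pb2 / policy_pb2
- # protobuf types directly; no proto-plus wrapper is involved on this
- # path.)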
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_iam_policy") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_get_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.GetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/lakes/sample3/contentitems/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
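- # (json_format.MessageToJson serializes the protobuf response directly;
- # unlike the proto-plus Content responses above, no .pb() conversion
- # step is needed first.)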
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_content_rest_bad_request(request_type=content.ListContentRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_content(request) - - -@pytest.mark.parametrize("request_type", [ - content.ListContentRequest, - dict, -]) -def test_list_content_rest_call_success(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = content.ListContentResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = content.ListContentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_content(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListContentPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_content_rest_interceptors(null_interceptor): - transport = transports.ContentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ContentServiceRestInterceptor(), - ) - client = ContentServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_list_content") as post, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "post_list_content_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ContentServiceRestInterceptor, "pre_list_content") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = content.ListContentRequest.pb(content.ListContentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = content.ListContentResponse.to_json(content.ListContentResponse()) - req.return_value.content = return_value - - request = content.ListContentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = content.ListContentResponse() - post_with_metadata.return_value = content.ListContentResponse(), metadata - - 
client.list_content(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
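- # (The locations mix-in also speaks raw protobuf; an empty
- # ListLocationsResponse is enough to exercise the happy path, since the
- # test only checks the response type.)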
- return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_content_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_content), - '__call__') as call: - client.create_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.CreateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_content_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_content), - '__call__') as call: - client.update_content(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_content.UpdateContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_content_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_content), - '__call__') as call: - client.delete_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.DeleteContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_content_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_content), - '__call__') as call: - client.get_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.GetContentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_content_empty_call_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_content), - '__call__') as call: - client.list_content(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = content.ListContentRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ContentServiceGrpcTransport, - ) - -def test_content_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ContentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_content_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ContentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_content', - 'update_content', - 'delete_content', - 'get_content', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'list_content', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_content_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ContentServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_content_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ContentServiceTransport() - adc.assert_called_once() - - -def test_content_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ContentServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, - ], -) -def test_content_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ContentServiceGrpcTransport, - transports.ContentServiceGrpcAsyncIOTransport, - transports.ContentServiceRestTransport, - ], -) -def test_content_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ContentServiceGrpcTransport, grpc_helpers), - (transports.ContentServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_content_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_content_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.ContentServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_content_service_host_no_port(transport_name): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_content_service_host_with_port(transport_name): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if 
transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_content_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ContentServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ContentServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_content._session - session2 = client2.transport.create_content._session - assert session1 != session2 - session1 = client1.transport.update_content._session - session2 = client2.transport.update_content._session - assert session1 != session2 - session1 = client1.transport.delete_content._session - session2 = client2.transport.delete_content._session - assert session1 != session2 - session1 = client1.transport.get_content._session - session2 = client2.transport.get_content._session - assert session1 != session2 - session1 = client1.transport.get_iam_policy._session - session2 = client2.transport.get_iam_policy._session - assert session1 != session2 - session1 = client1.transport.set_iam_policy._session - session2 = client2.transport.set_iam_policy._session - assert session1 != session2 - session1 = client1.transport.test_iam_permissions._session - session2 = client2.transport.test_iam_permissions._session - assert session1 != session2 - session1 = client1.transport.list_content._session - session2 = client2.transport.list_content._session - assert session1 != session2 -def test_content_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ContentServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_content_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ContentServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ContentServiceGrpcTransport, transports.ContentServiceGrpcAsyncIOTransport]) -def test_content_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_content_path(): - project = "squid" - location = "clam" - lake = "whelk" - content = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format(project=project, location=location, lake=lake, content=content, ) - actual = ContentServiceClient.content_path(project, location, lake, content) - assert expected == actual - - -def test_parse_content_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "content": "mussel", - } - path = ContentServiceClient.content_path(**expected) - - # Check that the path construction is reversible. 
- actual = ContentServiceClient.parse_content_path(path) - assert expected == actual - -def test_lake_path(): - project = "winkle" - location = "nautilus" - lake = "scallop" - expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - actual = ContentServiceClient.lake_path(project, location, lake) - assert expected == actual - - -def test_parse_lake_path(): - expected = { - "project": "abalone", - "location": "squid", - "lake": "clam", - } - path = ContentServiceClient.lake_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_lake_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ContentServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ContentServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = ContentServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ContentServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ContentServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ContentServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = ContentServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ContentServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ContentServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ContentServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ContentServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ContentServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ContentServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ContentServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = ContentServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = ContentServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    # Patch get_location itself (not list_locations, a copy-paste slip that
-    # only worked because all unary-unary callables share the same type).
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = ContentServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = ContentServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = ContentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ContentServiceClient, transports.ContentServiceGrpcTransport), - (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py deleted file mode 100644 index 8b86e6fafa7e..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ /dev/null @@ -1,9435 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-try:
-    from google.auth.aio import credentials as ga_credentials_async
-    HAS_GOOGLE_AUTH_AIO = True
-except ImportError:  # pragma: NO COVER
-    HAS_GOOGLE_AUTH_AIO = False
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async  # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceAsyncClient
-from google.cloud.dataplex_v1.services.data_scan_service import DataScanServiceClient
-from google.cloud.dataplex_v1.services.data_scan_service import pagers
-from google.cloud.dataplex_v1.services.data_scan_service import transports
-from google.cloud.dataplex_v1.types import data_discovery
-from google.cloud.dataplex_v1.types import data_profile
-from google.cloud.dataplex_v1.types import data_quality
-from google.cloud.dataplex_v1.types import datascans
-from google.cloud.dataplex_v1.types import datascans_common
-from google.cloud.dataplex_v1.types import processing
-from google.cloud.dataplex_v1.types import resources
-from google.cloud.dataplex_v1.types import service
-from google.cloud.location import locations_pb2
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import options_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-import google.auth
-
-
-
-CRED_INFO_JSON = {
-    "credential_source": "/path/to/file",
-    "credential_type": "service account credentials",
-    "principal": "service-account@example.com",
-}
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
-
-
-async def mock_async_gen(data, chunk_size=1):
-    # Step by chunk_size so chunks do not overlap for chunk_size > 1.
-    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
-        chunk = data[i : i + chunk_size]
-        yield chunk.encode("utf-8")
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
-def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataScanServiceClient._get_default_mtls_endpoint(None) is None - assert DataScanServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataScanServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataScanServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataScanServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataScanServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataScanServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataScanServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataScanServiceClient._read_environment_variables() == (False, "auto", 
"foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataScanServiceClient._get_client_cert_source(None, False) is None - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataScanServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataScanServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataScanServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataScanServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataScanServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataScanServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataScanServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataScanServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataScanServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataScanServiceClient._get_universe_domain(None, None) == DataScanServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataScanServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DataScanServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DataScanServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataScanServiceClient, "grpc"), - (DataScanServiceAsyncClient, "grpc_asyncio"), - (DataScanServiceClient, "rest"), -]) -def test_data_scan_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataScanServiceGrpcTransport, "grpc"), - (transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DataScanServiceRestTransport, "rest"), -]) -def test_data_scan_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataScanServiceClient, "grpc"), - (DataScanServiceAsyncClient, "grpc_asyncio"), - (DataScanServiceClient, "rest"), -]) -def test_data_scan_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_data_scan_service_client_get_transport_class(): - transport = DataScanServiceClient.get_transport_class() - available_transports = [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceRestTransport, - ] - assert transport in available_transports - - transport = DataScanServiceClient.get_transport_class("grpc") - assert transport == transports.DataScanServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest"), -]) -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test_data_scan_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataScanServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "true"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", "false"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", "true"), - (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(DataScanServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_scan_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataScanServiceClient, DataScanServiceAsyncClient -]) -@mock.patch.object(DataScanServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataScanServiceAsyncClient)) -def test_data_scan_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataScanServiceClient, DataScanServiceAsyncClient -]) -@mock.patch.object(DataScanServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceClient)) -@mock.patch.object(DataScanServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataScanServiceAsyncClient)) -def test_data_scan_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest"), -]) -def test_data_scan_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DataScanServiceClient, transports.DataScanServiceRestTransport, "rest", None), -]) -def test_data_scan_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_scan_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataScanServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc", grpc_helpers), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_scan_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datascans.CreateDataScanRequest, - dict, -]) -def test_create_data_scan(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.CreateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.CreateDataScanRequest( - parent='parent_value', - data_scan_id='data_scan_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.CreateDataScanRequest( - parent='parent_value', - data_scan_id='data_scan_id_value', - ) - -def test_create_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc - request = {} - client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_scan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_scan] = mock_rpc - - request = {} - await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.CreateDataScanRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.CreateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_scan_async_from_dict(): - await test_create_data_scan_async(request_type=dict) - -def test_create_data_scan_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.CreateDataScanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_scan_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.CreateDataScanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_scan_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_scan( - parent='parent_value', - data_scan=datascans.DataScan(name='name_value'), - data_scan_id='data_scan_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].parent
-    mock_val = 'parent_value'
-    assert arg == mock_val
-    arg = args[0].data_scan
-    mock_val = datascans.DataScan(name='name_value')
-    assert arg == mock_val
-    arg = args[0].data_scan_id
-    mock_val = 'data_scan_id_value'
-    assert arg == mock_val
-
-
-def test_create_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_data_scan(
-            datascans.CreateDataScanRequest(),
-            parent='parent_value',
-            data_scan=datascans.DataScan(name='name_value'),
-            data_scan_id='data_scan_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_data_scan),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_data_scan(
-            parent='parent_value',
-            data_scan=datascans.DataScan(name='name_value'),
-            data_scan_id='data_scan_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_scan
-        mock_val = datascans.DataScan(name='name_value')
-        assert arg == mock_val
-        arg = args[0].data_scan_id
-        mock_val = 'data_scan_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_data_scan(
-            datascans.CreateDataScanRequest(),
-            parent='parent_value',
-            data_scan=datascans.DataScan(name='name_value'),
-            data_scan_id='data_scan_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datascans.UpdateDataScanRequest,
-    dict,
-])
-def test_update_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.update_data_scan),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.UpdateDataScanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_scan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.UpdateDataScanRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_scan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.UpdateDataScanRequest( - ) - -def test_update_data_scan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc - request = {} - client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_data_scan in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_data_scan] = mock_rpc
-
-        request = {}
-        await client.update_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.update_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.UpdateDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.update_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.UpdateDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_update_data_scan_async_from_dict():
-    await test_update_data_scan_async(request_type=dict)
-
-def test_update_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.UpdateDataScanRequest()
-
-    request.data_scan.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.update_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_scan.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.UpdateDataScanRequest()
-
-    request.data_scan.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.update_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_scan.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_data_scan(
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_scan
-        mock_val = datascans.DataScan(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_data_scan(
-            datascans.UpdateDataScanRequest(),
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_data_scan(
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_scan
-        mock_val = datascans.DataScan(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_data_scan(
-            datascans.UpdateDataScanRequest(),
-            data_scan=datascans.DataScan(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  datascans.DeleteDataScanRequest,
-  dict,
-])
-def test_delete_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.DeleteDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_data_scan_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.DeleteDataScanRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_data_scan(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datascans.DeleteDataScanRequest(
-            name='name_value',
-        )
-
-def test_delete_data_scan_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_data_scan in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc
-        request = {}
-        client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_data_scan in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_data_scan] = mock_rpc
-
-        request = {}
-        await client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.delete_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.DeleteDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.DeleteDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_async_from_dict():
-    await test_delete_data_scan_async(request_type=dict)
-
-def test_delete_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.DeleteDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.DeleteDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_scan(
-            datascans.DeleteDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_data_scan(
-            datascans.DeleteDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  datascans.GetDataScanRequest,
-  dict,
-])
-def test_get_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScan(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-            type_=datascans.DataScanType.DATA_QUALITY,
-        )
-        response = client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.DataScan)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == resources.State.ACTIVE
-    assert response.type_ == datascans.DataScanType.DATA_QUALITY
-
-
-def test_get_data_scan_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.GetDataScanRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_data_scan(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datascans.GetDataScanRequest(
-            name='name_value',
-        )
-
-def test_get_data_scan_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_scan in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc
-        request = {}
-        client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_data_scan in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_data_scan] = mock_rpc
-
-        request = {}
-        await client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-            type_=datascans.DataScanType.DATA_QUALITY,
-        ))
-        response = await client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.DataScan)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == resources.State.ACTIVE
-    assert response.type_ == datascans.DataScanType.DATA_QUALITY
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_async_from_dict():
-    await test_get_data_scan_async(request_type=dict)
-
-def test_get_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.DataScan()
-        client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.GetDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan())
-        await client.get_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScan()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_scan(
-            datascans.GetDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScan()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_data_scan(
-            datascans.GetDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  datascans.ListDataScansRequest,
-  dict,
-])
-def test_list_data_scans(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.ListDataScansResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.ListDataScansRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDataScansPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-def test_list_data_scans_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.ListDataScansRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-        order_by='order_by_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_data_scans(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datascans.ListDataScansRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-            order_by='order_by_value',
-        )
-
-def test_list_data_scans_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_scans in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc
-        request = {}
-        client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_scans(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_data_scans in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_data_scans] = mock_rpc
-
-        request = {}
-        await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_data_scans(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScansRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.ListDataScansRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDataScansAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_from_dict():
-    await test_list_data_scans_async(request_type=dict)
-
-def test_list_data_scans_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScansRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value = datascans.ListDataScansResponse()
-        client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_scans_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.ListDataScansRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
-        await client.list_data_scans(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_data_scans_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.ListDataScansResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_data_scans(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_data_scans_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_scans(
-            datascans.ListDataScansRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_data_scans_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.ListDataScansResponse()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_data_scans(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_scans_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_data_scans(
-            datascans.ListDataScansRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_scans_pager(transport_name: str = "grpc"):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_data_scans(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, datascans.DataScan)
-                   for i in results)
-def test_list_data_scans_pages(transport_name: str = "grpc"):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_data_scans(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_pager():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_data_scans(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, datascans.DataScan)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_data_scans_async_pages():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_data_scans(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  datascans.RunDataScanRequest,
-  dict,
-])
-def test_run_data_scan(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.RunDataScanResponse(
-        )
-        response = client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.RunDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.RunDataScanResponse)
-
-
-def test_run_data_scan_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.RunDataScanRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.run_data_scan(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datascans.RunDataScanRequest(
-            name='name_value',
-        )
-
-def test_run_data_scan_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.run_data_scan in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc
-        request = {}
-        client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.run_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_run_data_scan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.run_data_scan in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.run_data_scan] = mock_rpc
-
-        request = {}
-        await client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.run_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_run_data_scan_async(transport: str = 'grpc_asyncio', request_type=datascans.RunDataScanRequest):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse(
-        ))
-        response = await client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datascans.RunDataScanRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.RunDataScanResponse)
-
-
-@pytest.mark.asyncio
-async def test_run_data_scan_async_from_dict():
-    await test_run_data_scan_async(request_type=dict)
-
-def test_run_data_scan_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.RunDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.RunDataScanResponse()
-        client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_run_data_scan_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datascans.RunDataScanRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
-        await client.run_data_scan(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_run_data_scan_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.RunDataScanResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.run_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_run_data_scan_flattened_error():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.run_data_scan(
-            datascans.RunDataScanRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_run_data_scan_flattened_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.RunDataScanResponse()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.run_data_scan(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_run_data_scan_flattened_error_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.run_data_scan(
-            datascans.RunDataScanRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  datascans.GetDataScanJobRequest,
-  dict,
-])
-def test_get_data_scan_job(request_type, transport: str = 'grpc'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datascans.DataScanJob(
-            name='name_value',
-            uid='uid_value',
-            state=datascans.DataScanJob.State.RUNNING,
-            message='message_value',
-            type_=datascans.DataScanType.DATA_QUALITY,
-        )
-        response = client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datascans.GetDataScanJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datascans.DataScanJob)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.state == datascans.DataScanJob.State.RUNNING
-    assert response.message == 'message_value'
-    assert response.type_ == datascans.DataScanType.DATA_QUALITY
-
-
-def test_get_data_scan_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datascans.GetDataScanJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_data_scan_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datascans.GetDataScanJobRequest(
-            name='name_value',
-        )
-
-def test_get_data_scan_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_scan_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc
-        request = {}
-        client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_data_scan_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_scan_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataScanServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_data_scan_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_data_scan_job] = mock_rpc
-
-        request = {}
-        await client.get_data_scan_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - await client.get_data_scan_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_scan_job_async(transport: str = 'grpc_asyncio', request_type=datascans.GetDataScanJobRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob( - name='name_value', - uid='uid_value', - state=datascans.DataScanJob.State.RUNNING, - message='message_value', - type_=datascans.DataScanType.DATA_QUALITY, - )) - response = await client.get_data_scan_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.GetDataScanJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datascans.DataScanJob) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.state == datascans.DataScanJob.State.RUNNING - assert response.message == 'message_value' - assert response.type_ == datascans.DataScanType.DATA_QUALITY - - -@pytest.mark.asyncio -async def test_get_data_scan_job_async_from_dict(): - await test_get_data_scan_job_async(request_type=dict) - -def test_get_data_scan_job_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.GetDataScanJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - call.return_value = datascans.DataScanJob() - client.get_data_scan_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_scan_job_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.GetDataScanJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob()) - await client.get_data_scan_job(request) - - # Establish that the underlying gRPC stub method was called.
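The metadata check coming up verifies implicit routing: the resource name from the request is echoed into an x-goog-request-params entry in the call metadata so the backend can route by resource. A small sketch of the expected shape, with illustrative values:

    name = 'name_value'
    metadata = (('x-goog-request-params', 'name=%s' % name),)
    assert ('x-goog-request-params', 'name=name_value') in metadata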
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_scan_job_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.DataScanJob() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_data_scan_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_data_scan_job_flattened_error(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_scan_job( - datascans.GetDataScanJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_data_scan_job_flattened_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_data_scan_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_scan_job_flattened_error_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_scan_job( - datascans.GetDataScanJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datascans.ListDataScanJobsRequest, - dict, -]) -def test_list_data_scan_jobs(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = datascans.ListDataScanJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.ListDataScanJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataScanJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_scan_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.ListDataScanJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_scan_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.ListDataScanJobsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_data_scan_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc - request = {} - client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_data_scan_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_scan_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_scan_jobs] = mock_rpc - - request = {} - await client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_scan_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async(transport: str = 'grpc_asyncio', request_type=datascans.ListDataScanJobsRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.ListDataScanJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataScanJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_from_dict(): - await test_list_data_scan_jobs_async(request_type=dict) - -def test_list_data_scan_jobs_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.ListDataScanJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - call.return_value = datascans.ListDataScanJobsResponse() - client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.ListDataScanJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse()) - await client.list_data_scan_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_scan_jobs_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.ListDataScanJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_scan_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_scan_jobs_flattened_error(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_scan_jobs( - datascans.ListDataScanJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_flattened_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_scan_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values.
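Each entry of mock_calls unpacks as a (name, args, kwargs) triple, which is why these tests recover the request a client sent as args[0]. A compact sketch with illustrative values:

    from unittest import mock

    m = mock.Mock()
    m({'parent': 'parent_value'})
    _, args, _ = m.mock_calls[0]
    assert args[0]['parent'] == 'parent_value'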
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_flattened_error_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_scan_jobs( - datascans.ListDataScanJobsRequest(), - parent='parent_value', - ) - - -def test_list_data_scan_jobs_pager(transport_name: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_scan_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datascans.DataScanJob) - for i in results) -def test_list_data_scan_jobs_pages(transport_name: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_scan_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_pager(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
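As in the synchronous pager test above, side_effect hands back one response per underlying call, so the pager consumes the pages in order and the trailing RuntimeError would surface any unexpected extra fetch. A minimal sketch of the mechanism; fetch is a hypothetical stand-in:

    from unittest import mock

    fetch = mock.Mock(side_effect=[['a', 'b'], ['c'], RuntimeError])
    pages = [fetch(), fetch()]                 # a third fetch would raise
    assert [i for p in pages for i in p] == ['a', 'b', 'c']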
- call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_scan_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datascans.DataScanJob) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_scan_jobs_async_pages(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - next_page_token='abc', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[], - next_page_token='def', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - ], - next_page_token='ghi', - ), - datascans.ListDataScanJobsResponse( - data_scan_jobs=[ - datascans.DataScanJob(), - datascans.DataScanJob(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_scan_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datascans.GenerateDataQualityRulesRequest, - dict, -]) -def test_generate_data_quality_rules(request_type, transport: str = 'grpc'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.GenerateDataQualityRulesResponse( - ) - response = client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datascans.GenerateDataQualityRulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datascans.GenerateDataQualityRulesResponse) - - -def test_generate_data_quality_rules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datascans.GenerateDataQualityRulesRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.generate_data_quality_rules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datascans.GenerateDataQualityRulesRequest( - name='name_value', - ) - -def test_generate_data_quality_rules_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc - request = {} - client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.generate_data_quality_rules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.generate_data_quality_rules in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.generate_data_quality_rules] = mock_rpc - - request = {} - await client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.generate_data_quality_rules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_async(transport: str = 'grpc_asyncio', request_type=datascans.GenerateDataQualityRulesRequest): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse( - )) - response = await client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datascans.GenerateDataQualityRulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datascans.GenerateDataQualityRulesResponse) - - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_async_from_dict(): - await test_generate_data_quality_rules_async(request_type=dict) - -def test_generate_data_quality_rules_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.GenerateDataQualityRulesRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - call.return_value = datascans.GenerateDataQualityRulesResponse() - client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datascans.GenerateDataQualityRulesRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse()) - await client.generate_data_quality_rules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_generate_data_quality_rules_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datascans.GenerateDataQualityRulesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.generate_data_quality_rules( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_generate_data_quality_rules_flattened_error(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.generate_data_quality_rules( - datascans.GenerateDataQualityRulesRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_flattened_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.generate_data_quality_rules( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_generate_data_quality_rules_flattened_error_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
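The ValueError asserted next enforces that a prebuilt request object and flattened keyword fields are mutually exclusive, since honoring both would make the effective request ambiguous. A sketch of the rule; call_rpc is hypothetical:

    import pytest

    def call_rpc(request=None, *, name=None):
        if request is not None and name is not None:
            raise ValueError('Cannot pass both a request object and flattened fields.')
        return request or {'name': name}

    with pytest.raises(ValueError):
        call_rpc({'name': 'a'}, name='b')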
- with pytest.raises(ValueError): - await client.generate_data_quality_rules( - datascans.GenerateDataQualityRulesRequest(), - name='name_value', - ) - - -def test_create_data_scan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_scan] = mock_rpc - - request = {} - client.create_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_data_scan_rest_required_fields(request_type=datascans.CreateDataScanRequest): - transport_class = transports.DataScanServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["data_scan_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "dataScanId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_scan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "dataScanId" in jsonified_request - assert jsonified_request["dataScanId"] == request_init["data_scan_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["dataScanId"] = 'data_scan_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_scan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("data_scan_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "dataScanId" in jsonified_request - assert jsonified_request["dataScanId"] == 'data_scan_id_value' - - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
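For the REST transport the tests fake the HTTP layer itself: requests.Session.request is patched to return a Response carrying pre-encoded JSON, so the client exercises its full serialization and parse path without a server. A self-contained sketch of that setup, with an illustrative URL and payload:

    from unittest import mock
    from requests import Session, Response

    fake = Response()
    fake.status_code = 200
    fake._content = b'{"name": "operations/spam"}'

    with mock.patch.object(Session, 'request', return_value=fake):
        body = Session().request('POST', 'https://example.com/v1/sample').json()
    assert body == {'name': 'operations/spam'}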
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_data_scan(request) - - expected_params = [ - ( - "dataScanId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_data_scan_rest_unset_required_fields(): - transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_data_scan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("dataScanId", "validateOnly", )) & set(("parent", "dataScan", "dataScanId", ))) - - -def test_create_data_scan_rest_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - data_scan=datascans.DataScan(name='name_value'), - data_scan_id='data_scan_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_scan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataScans" % client.transport._host, args[1]) - - -def test_create_data_scan_rest_flattened_error(transport: str = 'rest'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_data_scan( - datascans.CreateDataScanRequest(), - parent='parent_value', - data_scan=datascans.DataScan(name='name_value'), - data_scan_id='data_scan_id_value', - ) - - -def test_update_data_scan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_scan] = mock_rpc - - request = {} - client.update_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_data_scan_rest_required_fields(request_type=datascans.UpdateDataScanRequest): - transport_class = transports.DataScanServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_scan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_scan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
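The transcode stub built next stands in for HTTP transcoding, which maps a proto request onto a verb, a URI, query parameters, and a body; the tests substitute a plain dict with those four keys. A sketch of that shape, with hypothetical field values rather than the real mapping:

    transcode_result = {
        'uri': 'v1/sample_method',
        'method': 'patch',
        'query_params': {'validateOnly': True},   # hypothetical query field
        'body': {'dataScan': {'name': 'name_value'}},
    }
    assert set(transcode_result) == {'uri', 'method', 'query_params', 'body'}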
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_data_scan(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_data_scan_rest_unset_required_fields(): - transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_data_scan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("dataScan", ))) - - -def test_update_data_scan_rest_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - data_scan=datascans.DataScan(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_data_scan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{data_scan.name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) - - -def test_update_data_scan_rest_flattened_error(transport: str = 'rest'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_data_scan( - datascans.UpdateDataScanRequest(), - data_scan=datascans.DataScan(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_data_scan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_scan] = mock_rpc - - request = {} - client.delete_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_data_scan_rest_required_fields(request_type=datascans.DeleteDataScanRequest): - transport_class = transports.DataScanServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_scan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_scan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_data_scan(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_data_scan_rest_unset_required_fields(): - transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_data_scan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", )) & set(("name", ))) - - -def test_delete_data_scan_rest_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_data_scan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) - - -def test_delete_data_scan_rest_flattened_error(transport: str = 'rest'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_data_scan( - datascans.DeleteDataScanRequest(), - name='name_value', - ) - - -def test_get_data_scan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_scan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_scan] = mock_rpc - - request = {} - client.get_data_scan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_scan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_data_scan_rest_required_fields(request_type=datascans.GetDataScanRequest): - transport_class = transports.DataScanServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datascans.DataScan() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.DataScan.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_data_scan(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_data_scan_rest_unset_required_fields(): - transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_data_scan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view", )) & set(("name", ))) - - -def test_get_data_scan_rest_flattened(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.DataScan() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datascans.DataScan.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_data_scan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}" % client.transport._host, args[1]) - - -def test_get_data_scan_rest_flattened_error(transport: str = 'rest'): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.get_data_scan(
-            datascans.GetDataScanRequest(),
-            name='name_value',
-        )
-
-
-def test_list_data_scans_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_scans in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_scans] = mock_rpc
-
-        request = {}
-        client.list_data_scans(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_scans(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_data_scans_rest_required_fields(request_type=datascans.ListDataScansRequest):
-    transport_class = transports.DataScanServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scans._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scans._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datascans.ListDataScansResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datascans.ListDataScansResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_data_scans(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_data_scans_rest_unset_required_fields():
-    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_data_scans._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_scans_rest_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datascans.ListDataScansResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datascans.ListDataScansResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_data_scans(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataScans" % client.transport._host, args[1])
-
-
-def test_list_data_scans_rest_flattened_error(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_scans(
-            datascans.ListDataScansRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_scans_rest_pager(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScansResponse(
-                data_scans=[
-                    datascans.DataScan(),
-                    datascans.DataScan(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(datascans.ListDataScansResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_data_scans(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, datascans.DataScan)
-                   for i in results)
-
-        pages = list(client.list_data_scans(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_run_data_scan_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.run_data_scan in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.run_data_scan] = mock_rpc
-
-        request = {}
-        client.run_data_scan(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.run_data_scan(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_run_data_scan_rest_required_fields(request_type=datascans.RunDataScanRequest):
-    transport_class = transports.DataScanServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_data_scan._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_data_scan._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datascans.RunDataScanResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datascans.RunDataScanResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.run_data_scan(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_run_data_scan_rest_unset_required_fields():
-    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.run_data_scan._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_run_data_scan_rest_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datascans.RunDataScanResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datascans.RunDataScanResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.run_data_scan(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}:run" % client.transport._host, args[1])
-
-
-def test_run_data_scan_rest_flattened_error(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.run_data_scan(
-            datascans.RunDataScanRequest(),
-            name='name_value',
-        )
-
-
-def test_get_data_scan_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_scan_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_scan_job] = mock_rpc
-
-        request = {}
-        client.get_data_scan_job(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_data_scan_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_data_scan_job_rest_required_fields(request_type=datascans.GetDataScanJobRequest):
-    transport_class = transports.DataScanServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_scan_job._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("view", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datascans.DataScanJob()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datascans.DataScanJob.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_data_scan_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_data_scan_job_rest_unset_required_fields():
-    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_data_scan_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("view", )) & set(("name", )))
-
-
-def test_get_data_scan_job_rest_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datascans.DataScanJob()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datascans.DataScanJob.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_data_scan_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*/jobs/*}" % client.transport._host, args[1])
-
-
-def test_get_data_scan_job_rest_flattened_error(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_scan_job(
-            datascans.GetDataScanJobRequest(),
-            name='name_value',
-        )
-
-
-def test_list_data_scan_jobs_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_scan_jobs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_scan_jobs] = mock_rpc
-
-        request = {}
-        client.list_data_scan_jobs(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_scan_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_data_scan_jobs_rest_required_fields(request_type=datascans.ListDataScanJobsRequest):
-    transport_class = transports.DataScanServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scan_jobs._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_scan_jobs._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datascans.ListDataScanJobsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datascans.ListDataScanJobsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_data_scan_jobs(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_data_scan_jobs_rest_unset_required_fields():
-    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_data_scan_jobs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_scan_jobs_rest_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datascans.ListDataScanJobsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datascans.ListDataScanJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_data_scan_jobs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataScans/*}/jobs" % client.transport._host, args[1])
-
-
-def test_list_data_scan_jobs_rest_flattened_error(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_scan_jobs(
-            datascans.ListDataScanJobsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_scan_jobs_rest_pager(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            datascans.ListDataScanJobsResponse(
-                data_scan_jobs=[
-                    datascans.DataScanJob(),
-                    datascans.DataScanJob(),
-                    datascans.DataScanJob(),
-                ],
-                next_page_token='abc',
-            ),
-            datascans.ListDataScanJobsResponse(
-                data_scan_jobs=[],
-                next_page_token='def',
-            ),
-            datascans.ListDataScanJobsResponse(
-                data_scan_jobs=[
-                    datascans.DataScanJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            datascans.ListDataScanJobsResponse(
-                data_scan_jobs=[
-                    datascans.DataScanJob(),
-                    datascans.DataScanJob(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(datascans.ListDataScanJobsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'}
-
-        pager = client.list_data_scan_jobs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, datascans.DataScanJob)
-                   for i in results)
-
-        pages = list(client.list_data_scan_jobs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_generate_data_quality_rules_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.generate_data_quality_rules in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.generate_data_quality_rules] = mock_rpc
-
-        request = {}
-        client.generate_data_quality_rules(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.generate_data_quality_rules(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_generate_data_quality_rules_rest_required_fields(request_type=datascans.GenerateDataQualityRulesRequest):
-    transport_class = transports.DataScanServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_data_quality_rules._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_data_quality_rules._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datascans.GenerateDataQualityRulesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.generate_data_quality_rules(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_generate_data_quality_rules_rest_unset_required_fields():
-    transport = transports.DataScanServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.generate_data_quality_rules._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_generate_data_quality_rules_rest_flattened():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datascans.GenerateDataQualityRulesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.generate_data_quality_rules(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataScans/*}:generateDataQualityRules" % client.transport._host, args[1])
-
-
-def test_generate_data_quality_rules_rest_flattened_error(transport: str = 'rest'):
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.generate_data_quality_rules(
-            datascans.GenerateDataQualityRulesRequest(),
-            name='name_value',
-        )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataScanServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = DataScanServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.DataScanServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.DataScanServiceGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.DataScanServiceGrpcTransport,
-    transports.DataScanServiceGrpcAsyncIOTransport,
-    transports.DataScanServiceRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_kind_grpc():
-    transport = DataScanServiceClient.get_transport_class("grpc")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "grpc"
-
-
-def test_initialize_client_w_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_data_scan_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_scan),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.CreateDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_data_scan_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.update_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.UpdateDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_data_scan_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.delete_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.DeleteDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_data_scan_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.DataScan()
-        client.get_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.GetDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_data_scans_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        call.return_value = datascans.ListDataScansResponse()
-        client.list_data_scans(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.ListDataScansRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_run_data_scan_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        call.return_value = datascans.RunDataScanResponse()
-        client.run_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.RunDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_data_scan_job_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        call.return_value = datascans.DataScanJob()
-        client.get_data_scan_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.GetDataScanJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_data_scan_jobs_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        call.return_value = datascans.ListDataScanJobsResponse()
-        client.list_data_scan_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.ListDataScanJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_generate_data_quality_rules_empty_call_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        call.return_value = datascans.GenerateDataQualityRulesResponse()
-        client.generate_data_quality_rules(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.GenerateDataQualityRulesRequest()
-
-        assert args[0] == request_msg
-
-
-def test_transport_kind_grpc_asyncio():
-    transport = DataScanServiceAsyncClient.get_transport_class("grpc_asyncio")(
-        credentials=async_anonymous_credentials()
-    )
-    assert transport.kind == "grpc_asyncio"
-
-
-def test_initialize_client_w_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_create_data_scan_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        await client.create_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.CreateDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_update_data_scan_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        await client.update_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.UpdateDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_delete_data_scan_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        await client.delete_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.DeleteDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_data_scan_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScan(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-            type_=datascans.DataScanType.DATA_QUALITY,
-        ))
-        await client.get_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.GetDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_data_scans_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scans),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScansResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        await client.list_data_scans(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.ListDataScansRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_run_data_scan_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_data_scan),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.RunDataScanResponse(
-        ))
-        await client.run_data_scan(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.RunDataScanRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_data_scan_job_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_scan_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.DataScanJob(
-            name='name_value',
-            uid='uid_value',
-            state=datascans.DataScanJob.State.RUNNING,
-            message='message_value',
-            type_=datascans.DataScanType.DATA_QUALITY,
-        ))
-        await client.get_data_scan_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.GetDataScanJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_data_scan_jobs_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_scan_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.ListDataScanJobsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        await client.list_data_scan_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = datascans.ListDataScanJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_generate_data_quality_rules_empty_call_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.generate_data_quality_rules),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datascans.GenerateDataQualityRulesResponse(
-        ))
-        await client.generate_data_quality_rules(request=None)
-
-        # Establish that the underlying stub method was called.
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GenerateDataQualityRulesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DataScanServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_data_scan_rest_bad_request(request_type=datascans.CreateDataScanRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_scan(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.CreateDataScanRequest, - dict, -]) -def test_create_data_scan_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["data_scan"] = {'name': 'name_value', 'uid': 'uid_value', 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'data': {'entity': 'entity_value', 'resource': 'resource_value'}, 'execution_spec': {'trigger': {'on_demand': {}, 'schedule': {'cron': 'cron_value'}}, 'field': 'field_value'}, 'execution_status': {'latest_job_start_time': {}, 'latest_job_end_time': {}, 'latest_job_create_time': {}}, 'type_': 1, 'data_quality_spec': {'rules': [{'range_expectation': {'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'non_null_expectation': {}, 'set_expectation': {'values': ['values_value1', 'values_value2']}, 'regex_expectation': {'regex': 'regex_value'}, 'uniqueness_expectation': {}, 'statistic_range_expectation': {'statistic': 1, 'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'row_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'table_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'sql_assertion': {'sql_statement': 'sql_statement_value'}, 'column': 'column_value', 'ignore_null': True, 'dimension': 'dimension_value', 'threshold': 0.973, 'name': 'name_value', 'description': 'description_value', 'suspended': True}], 'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}, 'notification_report': {'recipients': {'emails': ['emails_value1', 'emails_value2']}, 'score_threshold_trigger': {'score_threshold': 0.1608}, 'job_failure_trigger': {}, 'job_end_trigger': {}}}, 'catalog_publishing_enabled': True}, 'data_profile_spec': {'sampling_percent': 0.17070000000000002, 'row_filter': 
'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}}, 'include_fields': {'field_names': ['field_names_value1', 'field_names_value2']}, 'exclude_fields': {}}, 'data_discovery_spec': {'bigquery_publishing_config': {'table_type': 1, 'connection': 'connection_value', 'location': 'location_value', 'project': 'project_value'}, 'storage_config': {'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'type_inference_disabled': True, 'quote': 'quote_value'}, 'json_options': {'encoding': 'encoding_value', 'type_inference_disabled': True}}}, 'data_quality_result': {'passed': True, 'score': 0.54, 'dimensions': [{'dimension': {'name': 'name_value'}, 'passed': True, 'score': 0.54}], 'columns': [{'column': 'column_value', 'score': 0.54, 'passed': True, 'dimensions': {}}], 'rules': [{'rule': {}, 'passed': True, 'evaluated_count': 1603, 'passed_count': 1288, 'null_count': 1091, 'pass_ratio': 0.1077, 'failing_rows_query': 'failing_rows_query_value', 'assertion_row_count': 2071}], 'row_count': 992, 'scanned_data': {'incremental_field': {'field': 'field_value', 'start': 'start_value', 'end': 'end_value'}}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}, 'catalog_publishing_status': {'state': 1}}, 'data_profile_result': {'row_count': 992, 'profile': {'fields': [{'name': 'name_value', 'type_': 'type__value', 'mode': 'mode_value', 'profile': {'null_ratio': 0.1081, 'distinct_ratio': 0.1504, 'top_n_values': [{'value': 'value_value', 'count': 553, 'ratio': 0.543}], 'string_profile': {'min_length': 1061, 'max_length': 1063, 'average_length': 0.1468}, 'integer_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 419, 'quartiles': [987, 988], 'max_': 421}, 'double_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 0.419, 'quartiles': [0.987, 0.988], 'max_': 0.421}}}]}, 'scanned_data': {}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}}, 'data_discovery_result': {'bigquery_publishing': {'dataset': 'dataset_value', 'location': 'location_value'}, 'scan_statistics': {'scanned_file_count': 1891, 'data_processed_bytes': 2119, 'files_excluded': 1472, 'tables_created': 1458, 'tables_deleted': 1457, 'tables_updated': 1473, 'filesets_created': 1686, 'filesets_deleted': 1685, 'filesets_updated': 1701}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = datascans.CreateDataScanRequest.meta.fields["data_scan"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
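# The branch below separates proto-plus wrappers from vanilla protobuf classes:
# proto-plus message types expose their schema as `Message.meta.fields` and have
# no `DESCRIPTOR` attribute, whereas generated `*_pb2` classes expose
# `DESCRIPTOR.fields`. A short sketch of the distinction (assuming the
# proto-plus package; `Toy` is an illustrative message, not part of this API):
#
#     import proto
#
#     class Toy(proto.Message):
#         name = proto.Field(proto.STRING, number=1)
#
#     assert 'name' in Toy.meta.fields        # proto-plus introspection
#     assert not hasattr(Toy, 'DESCRIPTOR')   # unlike *_pb2 message classes
#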
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_scan"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_scan"][field])): - del request_init["data_scan"][field][i][subfield] - else: - del request_init["data_scan"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_scan(request) - - # Establish that the response is the type that we expect. 
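# For RPCs returning a long-running operation, the check below only
# re-serializes the mocked Operation; `create_data_scan` itself returns a
# `google.api_core.operation.Operation` future wrapping it. A stricter
# assertion (illustrative, not part of the generated output) might read:
#
#     assert response.operation.name == 'operations/spam'
#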
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_scan_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_create_data_scan") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_create_data_scan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_create_data_scan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.CreateDataScanRequest.pb(datascans.CreateDataScanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = datascans.CreateDataScanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_data_scan_rest_bad_request(request_type=datascans.UpdateDataScanRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
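# The 400 status on the mocked Session response is turned into
# `core_exceptions.BadRequest` by google-api-core's HTTP-to-exception mapping,
# which these bad-request tests exercise end to end. The mapping can also be
# checked directly; a minimal self-contained sketch, assuming google-api-core
# and the stdlib (`fake_response` is an illustrative stand-in):
from unittest import mock
from google.api_core import exceptions as core_exceptions

fake_response = mock.Mock()
fake_response.status_code = 400
fake_response.json = mock.Mock(return_value={})
assert isinstance(core_exceptions.from_http_response(fake_response),
                  core_exceptions.BadRequest)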
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_scan(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.UpdateDataScanRequest, - dict, -]) -def test_update_data_scan_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'data_scan': {'name': 'projects/sample1/locations/sample2/dataScans/sample3'}} - request_init["data_scan"] = {'name': 'projects/sample1/locations/sample2/dataScans/sample3', 'uid': 'uid_value', 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'data': {'entity': 'entity_value', 'resource': 'resource_value'}, 'execution_spec': {'trigger': {'on_demand': {}, 'schedule': {'cron': 'cron_value'}}, 'field': 'field_value'}, 'execution_status': {'latest_job_start_time': {}, 'latest_job_end_time': {}, 'latest_job_create_time': {}}, 'type_': 1, 'data_quality_spec': {'rules': [{'range_expectation': {'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'non_null_expectation': {}, 'set_expectation': {'values': ['values_value1', 'values_value2']}, 'regex_expectation': {'regex': 'regex_value'}, 'uniqueness_expectation': {}, 'statistic_range_expectation': {'statistic': 1, 'min_value': 'min_value_value', 'max_value': 'max_value_value', 'strict_min_enabled': True, 'strict_max_enabled': True}, 'row_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'table_condition_expectation': {'sql_expression': 'sql_expression_value'}, 'sql_assertion': {'sql_statement': 'sql_statement_value'}, 'column': 'column_value', 'ignore_null': True, 'dimension': 'dimension_value', 'threshold': 0.973, 'name': 'name_value', 'description': 'description_value', 'suspended': True}], 'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}, 'notification_report': {'recipients': {'emails': ['emails_value1', 'emails_value2']}, 'score_threshold_trigger': {'score_threshold': 0.1608}, 'job_failure_trigger': {}, 'job_end_trigger': {}}}, 'catalog_publishing_enabled': True}, 'data_profile_spec': {'sampling_percent': 0.17070000000000002, 'row_filter': 'row_filter_value', 'post_scan_actions': {'bigquery_export': {'results_table': 'results_table_value'}}, 'include_fields': {'field_names': ['field_names_value1', 'field_names_value2']}, 'exclude_fields': {}}, 'data_discovery_spec': {'bigquery_publishing_config': {'table_type': 1, 'connection': 'connection_value', 'location': 'location_value', 'project': 'project_value'}, 'storage_config': {'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'type_inference_disabled': True, 'quote': 'quote_value'}, 'json_options': 
{'encoding': 'encoding_value', 'type_inference_disabled': True}}}, 'data_quality_result': {'passed': True, 'score': 0.54, 'dimensions': [{'dimension': {'name': 'name_value'}, 'passed': True, 'score': 0.54}], 'columns': [{'column': 'column_value', 'score': 0.54, 'passed': True, 'dimensions': {}}], 'rules': [{'rule': {}, 'passed': True, 'evaluated_count': 1603, 'passed_count': 1288, 'null_count': 1091, 'pass_ratio': 0.1077, 'failing_rows_query': 'failing_rows_query_value', 'assertion_row_count': 2071}], 'row_count': 992, 'scanned_data': {'incremental_field': {'field': 'field_value', 'start': 'start_value', 'end': 'end_value'}}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}, 'catalog_publishing_status': {'state': 1}}, 'data_profile_result': {'row_count': 992, 'profile': {'fields': [{'name': 'name_value', 'type_': 'type__value', 'mode': 'mode_value', 'profile': {'null_ratio': 0.1081, 'distinct_ratio': 0.1504, 'top_n_values': [{'value': 'value_value', 'count': 553, 'ratio': 0.543}], 'string_profile': {'min_length': 1061, 'max_length': 1063, 'average_length': 0.1468}, 'integer_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 419, 'quartiles': [987, 988], 'max_': 421}, 'double_profile': {'average': 0.731, 'standard_deviation': 0.1907, 'min_': 0.419, 'quartiles': [0.987, 0.988], 'max_': 0.421}}}]}, 'scanned_data': {}, 'post_scan_actions_result': {'bigquery_export_result': {'state': 1, 'message': 'message_value'}}}, 'data_discovery_result': {'bigquery_publishing': {'dataset': 'dataset_value', 'location': 'location_value'}, 'scan_statistics': {'scanned_file_count': 1891, 'data_processed_bytes': 2119, 'files_excluded': 1472, 'tables_created': 1458, 'tables_deleted': 1457, 'tables_updated': 1473, 'filesets_created': 1686, 'filesets_deleted': 1685, 'filesets_updated': 1701}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = datascans.UpdateDataScanRequest.meta.fields["data_scan"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_scan"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_scan"][field])): - del request_init["data_scan"][field][i][subfield] - else: - del request_init["data_scan"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_scan(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_scan_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_update_data_scan") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_update_data_scan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_update_data_scan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.UpdateDataScanRequest.pb(datascans.UpdateDataScanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = datascans.UpdateDataScanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_data_scan_rest_bad_request(request_type=datascans.DeleteDataScanRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_scan(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.DeleteDataScanRequest, - dict, -]) -def test_delete_data_scan_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
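# `client.transport._session` is an AuthorizedSession, a requests.Session
# subclass, so patching `request` on its *type* intercepts the method for every
# instance. A stdlib-only sketch of the mechanic (`FakeSession` is an
# illustrative stand-in, not part of this library):
from unittest import mock

class FakeSession:
    def request(self, method, url):
        raise AssertionError('real network call attempted')

session = FakeSession()
with mock.patch.object(type(session), 'request') as req:
    req.return_value = 'stubbed'
    assert session.request('GET', 'https://example.com') == 'stubbed'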
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_scan(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_scan_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_delete_data_scan") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_delete_data_scan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_delete_data_scan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.DeleteDataScanRequest.pb(datascans.DeleteDataScanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = datascans.DeleteDataScanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_scan_rest_bad_request(request_type=datascans.GetDataScanRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_scan(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.GetDataScanRequest, - dict, -]) -def test_get_data_scan_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.DataScan( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - type_=datascans.DataScanType.DATA_QUALITY, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.DataScan.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_scan(request) - - # Establish that the response is the type that we expect. 
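# Because the payload above went through `DataScan.pb()` and
# `json_format.MessageToJson`, the field-by-field asserts below verify the full
# REST deserialization path rather than just the mock: every flattened scalar
# and enum must survive a protobuf-JSON round trip.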
- assert isinstance(response, datascans.DataScan) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == datascans.DataScanType.DATA_QUALITY - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_scan_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_get_data_scan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.GetDataScanRequest.pb(datascans.GetDataScanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.DataScan.to_json(datascans.DataScan()) - req.return_value.content = return_value - - request = datascans.GetDataScanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.DataScan() - post_with_metadata.return_value = datascans.DataScan(), metadata - - client.get_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_scans_rest_bad_request(request_type=datascans.ListDataScansRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
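# In the interceptor tests above, the asserted order is pre -> transcoded HTTP
# call -> post -> post_with_metadata: `pre` may rewrite the request and
# metadata before transcoding, `post` receives the decoded response, and
# `post_with_metadata` additionally receives response metadata. The arbitrary
# ("cephalopod", "squid") pair just proves user metadata is passed through
# unchanged.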
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_scans(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.ListDataScansRequest, - dict, -]) -def test_list_data_scans_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.ListDataScansResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.ListDataScansResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_scans(request) - - # Establish that the response is the type that we expect. 
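# List methods wrap the raw ListDataScansResponse in a pager whose __getattr__
# delegates to the underlying response, which is why `next_page_token` and
# `unreachable` can be asserted directly on the pager below. Iterating the
# pager would issue further requests for as long as `next_page_token` is
# non-empty.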
- assert isinstance(response, pagers.ListDataScansPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_scans_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scans") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scans_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_list_data_scans") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.ListDataScansRequest.pb(datascans.ListDataScansRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.ListDataScansResponse.to_json(datascans.ListDataScansResponse()) - req.return_value.content = return_value - - request = datascans.ListDataScansRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.ListDataScansResponse() - post_with_metadata.return_value = datascans.ListDataScansResponse(), metadata - - client.list_data_scans(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_run_data_scan_rest_bad_request(request_type=datascans.RunDataScanRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.run_data_scan(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.RunDataScanRequest, - dict, -]) -def test_run_data_scan_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
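# The fake 200 responses in these tests are built by serializing a real
# protobuf message to JSON bytes, mirroring what the REST transport parses on
# receipt. A self-contained sketch of that round trip, assuming protobuf and
# googleapis-common-protos are installed:
from google.longrunning import operations_pb2
from google.protobuf import json_format

msg = operations_pb2.Operation(name='operations/spam')
payload = json_format.MessageToJson(msg).encode('UTF-8')
parsed = json_format.Parse(payload.decode('UTF-8'), operations_pb2.Operation())
assert parsed == msg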
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.RunDataScanResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.RunDataScanResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.run_data_scan(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datascans.RunDataScanResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_data_scan_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_run_data_scan") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_run_data_scan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_run_data_scan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.RunDataScanRequest.pb(datascans.RunDataScanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.RunDataScanResponse.to_json(datascans.RunDataScanResponse()) - req.return_value.content = return_value - - request = datascans.RunDataScanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.RunDataScanResponse() - post_with_metadata.return_value = datascans.RunDataScanResponse(), metadata - - client.run_data_scan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_scan_job_rest_bad_request(request_type=datascans.GetDataScanJobRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_scan_job(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.GetDataScanJobRequest, - dict, -]) -def test_get_data_scan_job_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3/jobs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.DataScanJob( - name='name_value', - uid='uid_value', - state=datascans.DataScanJob.State.RUNNING, - message='message_value', - type_=datascans.DataScanType.DATA_QUALITY, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.DataScanJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_scan_job(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datascans.DataScanJob) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.state == datascans.DataScanJob.State.RUNNING - assert response.message == 'message_value' - assert response.type_ == datascans.DataScanType.DATA_QUALITY - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_scan_job_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_job") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_get_data_scan_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_get_data_scan_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.GetDataScanJobRequest.pb(datascans.GetDataScanJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.DataScanJob.to_json(datascans.DataScanJob()) - req.return_value.content = return_value - - request = datascans.GetDataScanJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.DataScanJob() - post_with_metadata.return_value = datascans.DataScanJob(), metadata - - client.get_data_scan_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_scan_jobs_rest_bad_request(request_type=datascans.ListDataScanJobsRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_scan_jobs(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.ListDataScanJobsRequest, - dict, -]) -def test_list_data_scan_jobs_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.ListDataScanJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.ListDataScanJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_scan_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataScanJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_scan_jobs_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scan_jobs") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_list_data_scan_jobs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_list_data_scan_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.ListDataScanJobsRequest.pb(datascans.ListDataScanJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.ListDataScanJobsResponse.to_json(datascans.ListDataScanJobsResponse()) - req.return_value.content = return_value - - request = datascans.ListDataScanJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.ListDataScanJobsResponse() - post_with_metadata.return_value = datascans.ListDataScanJobsResponse(), metadata - - client.list_data_scan_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_generate_data_quality_rules_rest_bad_request(request_type=datascans.GenerateDataQualityRulesRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.generate_data_quality_rules(request) - - -@pytest.mark.parametrize("request_type", [ - datascans.GenerateDataQualityRulesRequest, - dict, -]) -def test_generate_data_quality_rules_rest_call_success(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataScans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datascans.GenerateDataQualityRulesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datascans.GenerateDataQualityRulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.generate_data_quality_rules(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datascans.GenerateDataQualityRulesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_data_quality_rules_rest_interceptors(null_interceptor): - transport = transports.DataScanServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataScanServiceRestInterceptor(), - ) - client = DataScanServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_generate_data_quality_rules") as post, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "post_generate_data_quality_rules_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataScanServiceRestInterceptor, "pre_generate_data_quality_rules") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datascans.GenerateDataQualityRulesRequest.pb(datascans.GenerateDataQualityRulesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datascans.GenerateDataQualityRulesResponse.to_json(datascans.GenerateDataQualityRulesResponse()) - req.return_value.content = return_value - - request = datascans.GenerateDataQualityRulesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datascans.GenerateDataQualityRulesResponse() - post_with_metadata.return_value = datascans.GenerateDataQualityRulesResponse(), metadata - - client.generate_data_quality_rules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
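# From here on the tests cover the Locations and Operations mixin surface
# (get_location, list_locations, cancel/delete/get/list operations) rather than
# DataScanService RPCs proper; requests are built via `json_format.ParseDict`
# because the mixins accept raw `*_pb2` protobuf requests instead of proto-plus
# wrappers.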
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
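# RPCs that map to `google.protobuf.Empty` surface as `None` in the client, so
# the fake body below is just '{}' and the test ends with
# `assert response is None` instead of an isinstance check.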
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_scan_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_scan), - '__call__') as call: - client.create_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.CreateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_scan_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_scan), - '__call__') as call: - client.update_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.UpdateDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_scan_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_scan), - '__call__') as call: - client.delete_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.DeleteDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_scan_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan), - '__call__') as call: - client.get_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_scans_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_scans), - '__call__') as call: - client.list_data_scans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_data_scan_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_data_scan), - '__call__') as call: - client.run_data_scan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.RunDataScanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_scan_job_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_scan_job), - '__call__') as call: - client.get_data_scan_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GetDataScanJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_scan_jobs_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_data_scan_jobs), - '__call__') as call: - client.list_data_scan_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.ListDataScanJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_generate_data_quality_rules_empty_call_rest(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.generate_data_quality_rules), - '__call__') as call: - client.generate_data_quality_rules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datascans.GenerateDataQualityRulesRequest() - - assert args[0] == request_msg - - -def test_data_scan_service_rest_lro_client(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataScanServiceGrpcTransport, - ) - -def test_data_scan_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataScanServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_scan_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataScanServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError.
- methods = ( - 'create_data_scan', - 'update_data_scan', - 'delete_data_scan', - 'get_data_scan', - 'list_data_scans', - 'run_data_scan', - 'get_data_scan_job', - 'list_data_scan_jobs', - 'generate_data_quality_rules', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_scan_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataScanServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def test_data_scan_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_scan_service.transports.DataScanServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataScanServiceTransport() - adc.assert_called_once() - - -def test_data_scan_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataScanServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceGrpcAsyncIOTransport, - ], -) -def test_data_scan_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=('https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataScanServiceGrpcTransport, - transports.DataScanServiceGrpcAsyncIOTransport, - transports.DataScanServiceRestTransport, - ], -) -def test_data_scan_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataScanServiceGrpcTransport, grpc_helpers), - (transports.DataScanServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_scan_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used.
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_data_scan_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DataScanServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_scan_service_host_no_port(transport_name): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_scan_service_host_with_port(transport_name): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_data_scan_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DataScanServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DataScanServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_data_scan._session - session2 = client2.transport.create_data_scan._session - assert session1 != session2 - session1 = client1.transport.update_data_scan._session - session2 = client2.transport.update_data_scan._session - assert session1 != session2 - session1 = client1.transport.delete_data_scan._session - session2 = client2.transport.delete_data_scan._session - assert session1 != session2 - session1 = client1.transport.get_data_scan._session - session2 = client2.transport.get_data_scan._session - assert session1 != session2 - session1 = client1.transport.list_data_scans._session - session2 = client2.transport.list_data_scans._session - assert session1 != session2 - session1 = client1.transport.run_data_scan._session - session2 = client2.transport.run_data_scan._session - assert session1 != session2 - session1 = client1.transport.get_data_scan_job._session - session2 = client2.transport.get_data_scan_job._session - assert session1 != session2 - session1 = client1.transport.list_data_scan_jobs._session - session2 = client2.transport.list_data_scan_jobs._session - assert session1 != session2 - session1 = 
client1.transport.generate_data_quality_rules._session - session2 = client2.transport.generate_data_quality_rules._session - assert session1 != session2 -def test_data_scan_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataScanServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_data_scan_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataScanServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DataScanServiceGrpcTransport, transports.DataScanServiceGrpcAsyncIOTransport]) -def test_data_scan_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_scan_service_grpc_lro_client(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_data_scan_service_grpc_lro_async_client(): - client = DataScanServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_connection_path(): - project = "squid" - location = "clam" - connection = "whelk" - expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - actual = DataScanServiceClient.connection_path(project, location, connection) - assert expected == actual - - -def test_parse_connection_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection": "nudibranch", - } - path = DataScanServiceClient.connection_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_connection_path(path) - assert expected == actual - -def test_data_scan_path(): - project = "cuttlefish" - location = "mussel" - dataScan = "winkle" - expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format(project=project, location=location, dataScan=dataScan, ) - actual = DataScanServiceClient.data_scan_path(project, location, dataScan) - assert expected == actual - - -def test_parse_data_scan_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "dataScan": "abalone", - } - path = DataScanServiceClient.data_scan_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataScanServiceClient.parse_data_scan_path(path) - assert expected == actual - -def test_data_scan_job_path(): - project = "squid" - location = "clam" - dataScan = "whelk" - job = "octopus" - expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format(project=project, location=location, dataScan=dataScan, job=job, ) - actual = DataScanServiceClient.data_scan_job_path(project, location, dataScan, job) - assert expected == actual - - -def test_parse_data_scan_job_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "dataScan": "cuttlefish", - "job": "mussel", - } - path = DataScanServiceClient.data_scan_job_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_data_scan_job_path(path) - assert expected == actual - -def test_dataset_path(): - project = "winkle" - dataset = "nautilus" - expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - actual = DataScanServiceClient.dataset_path(project, dataset) - assert expected == actual - - -def test_parse_dataset_path(): - expected = { - "project": "scallop", - "dataset": "abalone", - } - path = DataScanServiceClient.dataset_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_dataset_path(path) - assert expected == actual - -def test_entity_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - entity = "oyster" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - actual = DataScanServiceClient.entity_path(project, location, lake, zone, entity) - assert expected == actual - - -def test_parse_entity_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "lake": "mussel", - "zone": "winkle", - "entity": "nautilus", - } - path = DataScanServiceClient.entity_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_entity_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataScanServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = DataScanServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataScanServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = DataScanServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataScanServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataScanServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = DataScanServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = DataScanServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = DataScanServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataScanServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = DataScanServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataScanServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataScanServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataScanServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataScanServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataScanServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
-        assert isinstance(response, locations_pb2.Location)
-@pytest.mark.asyncio
-async def test_get_location_async(transport: str = "grpc_asyncio"):
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.GetLocationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, locations_pb2.Location)
-
-def test_get_location_field_headers():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials())
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = DataScanServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_transport_close_rest():
-    client = DataScanServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'rest',
-        'grpc',
-    ]
-    for transport in transports:
-        client = DataScanServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport), - (DataScanServiceAsyncClient, transports.DataScanServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py deleted file mode 100644 index 5f1248bbf204..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ /dev/null @@ -1,14068 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock # pragma: NO COVER
-except ImportError: # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-try:
-    from google.auth.aio import credentials as ga_credentials_async
-    HAS_GOOGLE_AUTH_AIO = True
-except ImportError: # pragma: NO COVER
-    HAS_GOOGLE_AUTH_AIO = False
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceAsyncClient
-from google.cloud.dataplex_v1.services.data_taxonomy_service import DataTaxonomyServiceClient
-from google.cloud.dataplex_v1.services.data_taxonomy_service import pagers
-from google.cloud.dataplex_v1.services.data_taxonomy_service import transports
-from google.cloud.dataplex_v1.types import data_taxonomy
-from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy
-from google.cloud.dataplex_v1.types import security
-from google.cloud.dataplex_v1.types import service
-from google.cloud.location import locations_pb2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import options_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import empty_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-import google.auth
-
-
-
-CRED_INFO_JSON = {
-    "credential_source": "/path/to/file",
-    "credential_type": "service account credentials",
-    "principal": "service-account@example.com",
-}
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
-
-
-async def mock_async_gen(data, chunk_size=1):
-    for i in range(0, len(data)): # pragma: NO COVER
-        chunk = data[i : i + chunk_size]
-        yield chunk.encode("utf-8")
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
-def async_anonymous_credentials():
-    if HAS_GOOGLE_AUTH_AIO:
-        return ga_credentials_async.AnonymousCredentials()
-    return ga_credentials.AnonymousCredentials()
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(None) is None - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataTaxonomyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataTaxonomyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataTaxonomyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataTaxonomyServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert 
DataTaxonomyServiceClient._get_client_cert_source(None, False) is None - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataTaxonomyServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataTaxonomyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataTaxonomyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataTaxonomyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataTaxonomyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataTaxonomyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataTaxonomyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataTaxonomyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataTaxonomyServiceClient._get_universe_domain(None, None) == DataTaxonomyServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataTaxonomyServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DataTaxonomyServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DataTaxonomyServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTaxonomyServiceClient, "grpc"), - (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), - (DataTaxonomyServiceClient, "rest"), -]) -def test_data_taxonomy_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DataTaxonomyServiceRestTransport, "rest"), -]) -def test_data_taxonomy_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTaxonomyServiceClient, "grpc"), - (DataTaxonomyServiceAsyncClient, "grpc_asyncio"), - (DataTaxonomyServiceClient, "rest"), -]) -def test_data_taxonomy_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_data_taxonomy_service_client_get_transport_class(): - transport = DataTaxonomyServiceClient.get_transport_class() - available_transports = [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceRestTransport, - ] - assert transport in available_transports - - transport = DataTaxonomyServiceClient.get_transport_class("grpc") - assert transport == transports.DataTaxonomyServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest"), -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataTaxonomyServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "true"), - 
(DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", "false"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", "true"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_taxonomy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient -]) -@mock.patch.object(DataTaxonomyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataTaxonomyServiceClient, DataTaxonomyServiceAsyncClient -]) -@mock.patch.object(DataTaxonomyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceClient)) -@mock.patch.object(DataTaxonomyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTaxonomyServiceAsyncClient)) -def test_data_taxonomy_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTaxonomyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc"), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest"), -]) -def test_data_taxonomy_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceRestTransport, "rest", None), -]) -def test_data_taxonomy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_taxonomy_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataTaxonomyServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport, "grpc", grpc_helpers), - (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_taxonomy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_data_taxonomy.CreateDataTaxonomyRequest, - dict, -]) -def test_create_data_taxonomy(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_data_taxonomy.CreateDataTaxonomyRequest( - parent='parent_value', - data_taxonomy_id='data_taxonomy_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_data_taxonomy.CreateDataTaxonomyRequest( - parent='parent_value', - data_taxonomy_id='data_taxonomy_id_value', - ) - -def test_create_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc - request = {} - client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_taxonomy] = mock_rpc - - request = {} - await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_taxonomy_async_from_dict(): - await test_create_data_taxonomy_async(request_type=dict) - -def test_create_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_data_taxonomy( - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_taxonomy - mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') - assert arg == mock_val - arg = args[0].data_taxonomy_id - mock_val = 'data_taxonomy_id_value' - assert arg == mock_val - - -def test_create_data_taxonomy_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_taxonomy( - gcd_data_taxonomy.CreateDataTaxonomyRequest(), - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - -@pytest.mark.asyncio -async def test_create_data_taxonomy_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_data_taxonomy( - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_taxonomy - mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') - assert arg == mock_val - arg = args[0].data_taxonomy_id - mock_val = 'data_taxonomy_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_taxonomy_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_taxonomy( - gcd_data_taxonomy.CreateDataTaxonomyRequest(), - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gcd_data_taxonomy.UpdateDataTaxonomyRequest, - dict, -]) -def test_update_data_taxonomy(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcd_data_taxonomy.UpdateDataTaxonomyRequest( - ) - -def test_update_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc - request = {} - client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_taxonomy] = mock_rpc - - request = {} - await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_taxonomy_async_from_dict(): - await test_update_data_taxonomy_async(request_type=dict) - -def test_update_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - request.data_taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
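# --- Note for context; not part of the recorded file. GAPIC clients copy
# any URI-path fields into an 'x-goog-request-params' metadata entry so the
# request can be routed without parsing its body. For Update* RPCs the
# routing key is the resource name nested inside the payload:
#
#     ('x-goog-request-params', 'data_taxonomy.name=name_value')
#
# which is the exact pair the assertion below expects to find in
# kw['metadata']. ---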
- with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_taxonomy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - request.data_taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_taxonomy.name=name_value', - ) in kw['metadata'] - - -def test_update_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_taxonomy( - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_taxonomy - mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_taxonomy_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_taxonomy( - gcd_data_taxonomy.UpdateDataTaxonomyRequest(), - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_taxonomy_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
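# --- Note for context; not part of the recorded file. The async client
# awaits the stub, so a bare proto message is not a usable fake return
# value; these tests wrap it in grpc_helpers_async.FakeUnaryUnaryCall,
# an awaitable that resolves to the wrapped message:
#
#     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
#         operations_pb2.Operation(name='operations/spam')
#     )
#     response = await client.update_data_taxonomy(request)
#
# For long-running methods the resolved Operation is then wrapped into a
# future.Future by the client, which the response-type assertions check.
# The bare `call.return_value = operations_pb2.Operation(...)` line that
# sometimes precedes the FakeUnaryUnaryCall assignment is immediately
# overwritten, apparently harmless generator residue. ---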
- with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_taxonomy( - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_taxonomy - mock_val = gcd_data_taxonomy.DataTaxonomy(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_taxonomy_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_taxonomy( - gcd_data_taxonomy.UpdateDataTaxonomyRequest(), - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataTaxonomyRequest, - dict, -]) -def test_delete_data_taxonomy(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.DeleteDataTaxonomyRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataTaxonomyRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc - request = {} - client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_taxonomy] = mock_rpc - - request = {} - await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_async_from_dict(): - await test_delete_data_taxonomy_async(request_type=dict) - -def test_delete_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_taxonomy_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_taxonomy( - data_taxonomy.DeleteDataTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_data_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_data_taxonomy_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_data_taxonomy( - data_taxonomy.DeleteDataTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataTaxonomiesRequest, - dict, -]) -def test_list_data_taxonomies(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataTaxonomiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataTaxonomiesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_taxonomies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataTaxonomiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_taxonomies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataTaxonomiesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_taxonomies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_taxonomies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc - request = {} - client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_data_taxonomies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_taxonomies in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_taxonomies] = mock_rpc - - request = {} - await client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_taxonomies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataTaxonomiesRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - response = await client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataTaxonomiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataTaxonomiesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_from_dict(): - await test_list_data_taxonomies_async(request_type=dict) - -def test_list_data_taxonomies_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value = data_taxonomy.ListDataTaxonomiesResponse() - client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_taxonomies_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse()) - await client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_taxonomies_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataTaxonomiesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_taxonomies_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_taxonomies( - data_taxonomy.ListDataTaxonomiesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_taxonomies_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataTaxonomiesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_data_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_taxonomies_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_taxonomies( - data_taxonomy.ListDataTaxonomiesRequest(), - parent='parent_value', - ) - - -def test_list_data_taxonomies_pager(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_taxonomies(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_taxonomy.DataTaxonomy) - for i in results) -def test_list_data_taxonomies_pages(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Set the response to a series of pages. 
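# --- Sketch for orientation; not part of the recorded file. Assigning a
# sequence to `call.side_effect` makes the mock return one response per
# invocation, so each fake ListDataTaxonomiesResponse below plays the role
# of one server page. The pager keeps issuing RPCs while next_page_token is
# non-empty; with pages of 3 + 0 + 1 + 2 items it stops after the fourth
# response (empty token), so the trailing RuntimeError is never raised:
#
#     pager = client.list_data_taxonomies(request={})
#     results = list(pager)   # drives 4 RPCs, yields 6 DataTaxonomy items
# ---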
- call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_taxonomies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_taxonomies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataTaxonomy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_taxonomies_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
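# --- Note for context; not part of the recorded file. The async variants
# patch '__call__' with new_callable=mock.AsyncMock so each canned page can
# be awaited, and consumption uses `async for` (inside a coroutine):
#
#     async_pager = await client.list_data_taxonomies(request={})
#     items = [item async for item in async_pager]          # item at a time
#
#     pages = (await client.list_data_taxonomies(request={})).pages
#     tokens = [page.raw_page.next_page_token async for page in pages]
# ---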
- call.side_effect = ( - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[], - next_page_token='def', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataTaxonomiesResponse( - data_taxonomies=[ - data_taxonomy.DataTaxonomy(), - data_taxonomy.DataTaxonomy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_taxonomies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataTaxonomyRequest, - dict, -]) -def test_get_data_taxonomy(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - ) - response = client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataTaxonomy) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - assert response.class_count == 1182 - - -def test_get_data_taxonomy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataTaxonomyRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_data_taxonomy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataTaxonomyRequest( - name='name_value', - ) - -def test_get_data_taxonomy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc - request = {} - client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_taxonomy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_taxonomy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_taxonomy] = mock_rpc - - request = {} - await client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_taxonomy_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataTaxonomyRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - )) - response = await client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataTaxonomyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataTaxonomy) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - assert response.class_count == 1182 - - -@pytest.mark.asyncio -async def test_get_data_taxonomy_async_from_dict(): - await test_get_data_taxonomy_async(request_type=dict) - -def test_get_data_taxonomy_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.GetDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value = data_taxonomy.DataTaxonomy() - client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_taxonomy_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.GetDataTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy()) - await client.get_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_taxonomy_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataTaxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_data_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_data_taxonomy_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_taxonomy( - data_taxonomy.GetDataTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_data_taxonomy_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataTaxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_data_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_taxonomy_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_taxonomy( - data_taxonomy.GetDataTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.CreateDataAttributeBindingRequest, - dict, -]) -def test_create_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
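# --- Note for context; not part of the recorded file. Under AIP-4235 an API
# may mark a string request field (typically `request_id`) as auto-populated:
# if the caller leaves it empty, the client fills it with a fresh UUID4 so
# the server can de-duplicate retries. A rough sketch of the behaviour this
# failsafe guards, using a hypothetical `request_id` field:
#
#     import uuid
#     request = SomeRequest(parent='parent_value')   # request_id left unset
#     client.some_method(request=request)
#     sent = call.mock_calls[0].args[0]
#     uuid.UUID(sent.request_id, version=4)          # filled in by client
#
# CreateDataAttributeBindingRequest appears to declare no such field here,
# so the equality assertion below expects the request to round-trip
# unchanged. ---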
- client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.CreateDataAttributeBindingRequest( - parent='parent_value', - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.CreateDataAttributeBindingRequest( - parent='parent_value', - data_attribute_binding_id='data_attribute_binding_id_value', - ) - -def test_create_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc - request = {} - client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_attribute_binding] = mock_rpc - - request = {} - await client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_async_from_dict(): - await test_create_data_attribute_binding_async(request_type=dict) - -def test_create_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeBindingRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeBindingRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_attribute_binding_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_attribute_binding( - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_binding_id - mock_val = 'data_attribute_binding_id_value' - assert arg == mock_val - - -def test_create_data_attribute_binding_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_attribute_binding( - data_taxonomy.CreateDataAttributeBindingRequest(), - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_data_attribute_binding( - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_binding_id - mock_val = 'data_attribute_binding_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_attribute_binding( - data_taxonomy.CreateDataAttributeBindingRequest(), - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.UpdateDataAttributeBindingRequest, - dict, -]) -def test_update_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.UpdateDataAttributeBindingRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.UpdateDataAttributeBindingRequest( - ) - -def test_update_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc - request = {} - client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_attribute_binding] = mock_rpc - - request = {} - await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_async_from_dict(): - await test_update_data_attribute_binding_async(request_type=dict) - -def test_update_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeBindingRequest() - - request.data_attribute_binding.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute_binding.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeBindingRequest() - - request.data_attribute_binding.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute_binding.name=name_value', - ) in kw['metadata'] - - -def test_update_data_attribute_binding_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_attribute_binding( - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_attribute_binding_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_attribute_binding( - data_taxonomy.UpdateDataAttributeBindingRequest(), - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_attribute_binding( - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_attribute_binding - mock_val = data_taxonomy.DataAttributeBinding(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_attribute_binding( - data_taxonomy.UpdateDataAttributeBindingRequest(), - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeBindingRequest, - dict, -]) -def test_delete_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.DeleteDataAttributeBindingRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataAttributeBindingRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc - request = {} - client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute_binding] = mock_rpc - - request = {} - await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_async_from_dict(): - await test_delete_data_attribute_binding_async(request_type=dict) - -def test_delete_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_attribute_binding_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_attribute_binding( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_attribute_binding_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_attribute_binding( - data_taxonomy.DeleteDataAttributeBindingRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_data_attribute_binding( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_data_attribute_binding( - data_taxonomy.DeleteDataAttributeBindingRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataAttributeBindingsRequest, - dict, -]) -def test_list_data_attribute_bindings(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributeBindingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributeBindingsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_attribute_bindings_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataAttributeBindingsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_attribute_bindings(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataAttributeBindingsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_attribute_bindings_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc - request = {} - client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_data_attribute_bindings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_attribute_bindings in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_attribute_bindings] = mock_rpc - - request = {} - await client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_attribute_bindings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributeBindingsRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - response = await client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributeBindingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributeBindingsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_from_dict(): - await test_list_data_attribute_bindings_async(request_type=dict) - -def test_list_data_attribute_bindings_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value.
- request = data_taxonomy.ListDataAttributeBindingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.ListDataAttributeBindingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse()) - await client.list_data_attribute_bindings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_attribute_bindings_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_attribute_bindings( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_attribute_bindings_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_attribute_bindings( - data_taxonomy.ListDataAttributeBindingsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_attribute_bindings( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_attribute_bindings( - data_taxonomy.ListDataAttributeBindingsRequest(), - parent='parent_value', - ) - - -def test_list_data_attribute_bindings_pager(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_attribute_bindings(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_taxonomy.DataAttributeBinding) - for i in results) -def test_list_data_attribute_bindings_pages(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_attribute_bindings(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_attribute_bindings(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataAttributeBinding) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributeBindingsResponse( - data_attribute_bindings=[ - data_taxonomy.DataAttributeBinding(), - data_taxonomy.DataAttributeBinding(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_attribute_bindings(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeBindingRequest, - dict, -]) -def test_get_data_attribute_binding(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - resource='resource_value', - ) - response = client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttributeBinding) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.attributes == ['attributes_value'] - - -def test_get_data_attribute_binding_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataAttributeBindingRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_attribute_binding(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataAttributeBindingRequest( - name='name_value', - ) - -def test_get_data_attribute_binding_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc - request = {} - client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_attribute_binding in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_attribute_binding] = mock_rpc - - request = {} - await client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeBindingRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - )) - response = await client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeBindingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttributeBinding) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.attributes == ['attributes_value'] - - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_async_from_dict(): - await test_get_data_attribute_binding_async(request_type=dict) - -def test_get_data_attribute_binding_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.GetDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value = data_taxonomy.DataAttributeBinding() - client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.GetDataAttributeBindingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding()) - await client.get_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_attribute_binding_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttributeBinding() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_data_attribute_binding( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_data_attribute_binding_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_attribute_binding( - data_taxonomy.GetDataAttributeBindingRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_data_attribute_binding( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_attribute_binding_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_attribute_binding( - data_taxonomy.GetDataAttributeBindingRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.CreateDataAttributeRequest, - dict, -]) -def test_create_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future) - - -def test_create_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.CreateDataAttributeRequest( - parent='parent_value', - data_attribute_id='data_attribute_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.CreateDataAttributeRequest( - parent='parent_value', - data_attribute_id='data_attribute_id_value', - ) - -def test_create_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc - request = {} - client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_attribute] = mock_rpc - - request = {} - await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.CreateDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.CreateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_data_attribute_async_from_dict(): - await test_create_data_attribute_async(request_type=dict) - -def test_create_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
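# Async flavor of the stubbing pattern: the async transport awaits the stub's
# return value, so these tests wrap responses in
# grpc_helpers_async.FakeUnaryUnaryCall, an awaitable that resolves to the
# wrapped message. A small sketch of that behavior:
import asyncio

from google.api_core import grpc_helpers_async
from google.cloud.dataplex_v1.types import data_taxonomy


async def sketch_awaitable_stub_response():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(
        data_taxonomy.DataAttribute(name='name_value'))
    response = await fake_call  # what the async client does internally
    assert response.name == 'name_value'

asyncio.run(sketch_awaitable_stub_response())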
- with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.CreateDataAttributeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_attribute( - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute - mock_val = data_taxonomy.DataAttribute(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_id - mock_val = 'data_attribute_id_value' - assert arg == mock_val - - -def test_create_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_attribute( - data_taxonomy.CreateDataAttributeRequest(), - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - -@pytest.mark.asyncio -async def test_create_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
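# Flattened arguments are sugar over the request object: the client copies
# each keyword into the matching request field before invoking the RPC, and
# raises ValueError when a request object and flattened fields are mixed.
# A sketch of the request that the flattened call above constructs:
from google.cloud.dataplex_v1.types import data_taxonomy

flattened_request = data_taxonomy.CreateDataAttributeRequest(
    parent='parent_value',
    data_attribute=data_taxonomy.DataAttribute(name='name_value'),
    data_attribute_id='data_attribute_id_value',
)
assert flattened_request.parent == 'parent_value'
assert flattened_request.data_attribute.name == 'name_value'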
- with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_data_attribute( - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_attribute - mock_val = data_taxonomy.DataAttribute(name='name_value') - assert arg == mock_val - arg = args[0].data_attribute_id - mock_val = 'data_attribute_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_attribute( - data_taxonomy.CreateDataAttributeRequest(), - parent='parent_value', - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - data_attribute_id='data_attribute_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.UpdateDataAttributeRequest, - dict, -]) -def test_update_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.UpdateDataAttributeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
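# Context for the "auto populated field" tests: per AIP-4235, request fields
# annotated as UUID4 and listed in a method's auto_populated_fields are filled
# with a fresh uuid4 when the caller leaves them empty. A sketch of that rule
# (the field name `request_id` here is a hypothetical illustration):
import uuid


def sketch_auto_populate(request_id: str = "") -> str:
    # Keep a caller-supplied value; otherwise generate one.
    return request_id or str(uuid.uuid4())

assert sketch_auto_populate("explicit-id") == "explicit-id"
assert len(sketch_auto_populate()) == 36  # canonical UUID4 string length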
- with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.UpdateDataAttributeRequest( - ) - -def test_update_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc - request = {} - client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_attribute] = mock_rpc - - request = {} - await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.UpdateDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.UpdateDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_attribute_async_from_dict(): - await test_update_data_attribute_async(request_type=dict) - -def test_update_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeRequest() - - request.data_attribute.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.UpdateDataAttributeRequest() - - request.data_attribute.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
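# How the metadata inspection below works: entries in mock_calls unpack as
# (name, args, kwargs), and the transport passes request metadata as the
# `metadata` keyword, which is why the tests read kw['metadata']. Sketch:
from unittest import mock

stub = mock.Mock()
stub('request', metadata=[('x-goog-request-params', 'data_attribute.name=name_value')])
_, args, kw = stub.mock_calls[0]
assert args == ('request',)
assert ('x-goog-request-params', 'data_attribute.name=name_value') in kw['metadata']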
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_attribute.name=name_value', - ) in kw['metadata'] - - -def test_update_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_attribute( - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_attribute - mock_val = data_taxonomy.DataAttribute(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_attribute( - data_taxonomy.UpdateDataAttributeRequest(), - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_attribute( - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_attribute - mock_val = data_taxonomy.DataAttribute(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_data_attribute( - data_taxonomy.UpdateDataAttributeRequest(), - data_attribute=data_taxonomy.DataAttribute(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeRequest, - dict, -]) -def test_delete_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.DeleteDataAttributeRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.DeleteDataAttributeRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc - request = {} - client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
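# What "cached wrapped rpc" means in these tests: at client construction,
# _prep_wrapped_messages runs wrap_method over every stub once (layering in
# default retry/timeout/metadata) and stores the result in _wrapped_methods;
# later calls reuse that wrapper instead of re-wrapping. A dict-based sketch
# of the cache, with `wrap_method` standing in for
# google.api_core.gapic_v1.method.wrap_method:
def sketch_prep_wrapped(stubs, wrap_method):
    # Wrap each transport stub exactly once, up front.
    return {stub: wrap_method(stub) for stub in stubs}

wrap_calls = []
wrapped = sketch_prep_wrapped([len, abs], lambda s: (wrap_calls.append(s), s)[-1])
wrapped[len]('ab'); wrapped[len]('cd')  # later calls reuse the cached wrapper
assert wrap_calls == [len, abs]  # wrapping happened once per method, not per call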
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_attribute in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_attribute] = mock_rpc - - request = {} - await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_data_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.DeleteDataAttributeRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = data_taxonomy.DeleteDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_data_attribute_async_from_dict(): - await test_delete_data_attribute_async(request_type=dict) - -def test_delete_data_attribute_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
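# The "field header" being verified is the x-goog-request-params metadata
# entry, which the client derives from URI path fields so the frontend can
# route the request. The pager tests later in this file build the expected
# value with the same helper; a sketch of the round trip:
from google.api_core import gapic_v1

metadata = gapic_v1.routing_header.to_grpc_metadata((('name', 'name_value'),))
assert metadata == ('x-goog-request-params', 'name=name_value')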
- request = data_taxonomy.DeleteDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_attribute_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = data_taxonomy.DeleteDataAttributeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_attribute_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_attribute_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_attribute( - data_taxonomy.DeleteDataAttributeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_data_attribute_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. 
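# The return value designated below is the raw long-running-operation proto;
# only its `name` matters to these tests, because the response is wrapped
# before any polling could happen. A sketch of the proto's defaults:
from google.longrunning import operations_pb2

op = operations_pb2.Operation(name='operations/op')
assert op.name == 'operations/op'
assert not op.done  # a freshly created operation is not done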
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_data_attribute( - data_taxonomy.DeleteDataAttributeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataAttributesRequest, - dict, -]) -def test_list_data_attributes(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.ListDataAttributesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataAttributesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_data_attributes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.ListDataAttributesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
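# Why `.name` is pinned on the mock above: a bare Mock would return another
# Mock for `response.name`, and code paths that expect a string (the compute
# operation plumbing the generated comment refers to) would choke. Sketch:
from unittest import mock

rpc = mock.Mock()
rpc.return_value.name = "foo"
assert rpc({}).name == "foo"  # the stubbed response now exposes a real string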
- client.list_data_attributes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.ListDataAttributesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_attributes_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_attributes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc - request = {} - client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attributes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_attributes in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_attributes] = mock_rpc - - request = {} - await client.list_data_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_attributes_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.ListDataAttributesRequest): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        ))
-        response = await client.list_data_attributes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = data_taxonomy.ListDataAttributesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDataAttributesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_attributes_async_from_dict():
-    await test_list_data_attributes_async(request_type=dict)
-
-def test_list_data_attributes_field_headers():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = data_taxonomy.ListDataAttributesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_attributes),
-            '__call__') as call:
-        call.return_value = data_taxonomy.ListDataAttributesResponse()
-        client.list_data_attributes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_attributes_field_headers_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = data_taxonomy.ListDataAttributesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_attributes),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse())
-        await client.list_data_attributes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_data_attributes_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_attributes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = data_taxonomy.ListDataAttributesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_data_attributes(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_attributes_flattened_error(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_attributes( - data_taxonomy.ListDataAttributesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_attributes_flattened_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.ListDataAttributesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_attributes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_attributes_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_attributes( - data_taxonomy.ListDataAttributesRequest(), - parent='parent_value', - ) - - -def test_list_data_attributes_pager(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Set the response to a series of pages. 
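# Each element of the side_effect tuple below is one page: the pager issues a
# fresh RPC per page and stops once next_page_token is empty, so iterating it
# yields 3 + 0 + 1 + 2 = 6 DataAttribute items; the trailing RuntimeError
# would only surface if a fifth, unexpected request were made. A sketch of
# the token-driven loop a pager implements (dict pages are a stand-in):
def sketch_paginate(fetch_page):
    token = ''
    while True:
        page = fetch_page(token)
        yield from page['items']
        token = page['next_page_token']
        if not token:
            return

pages = {'': {'items': [1, 2], 'next_page_token': 'abc'},
         'abc': {'items': [3], 'next_page_token': ''}}
assert list(sketch_paginate(lambda t: pages[t])) == [1, 2, 3]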
- call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_attributes(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, data_taxonomy.DataAttribute) - for i in results) -def test_list_data_attributes_pages(transport_name: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_attributes(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_attributes_async_pager(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_attributes(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, data_taxonomy.DataAttribute) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_attributes_async_pages(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - next_page_token='abc', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[], - next_page_token='def', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - ], - next_page_token='ghi', - ), - data_taxonomy.ListDataAttributesResponse( - data_attributes=[ - data_taxonomy.DataAttribute(), - data_taxonomy.DataAttribute(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_attributes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeRequest, - dict, -]) -def test_get_data_attribute(request_type, transport: str = 'grpc'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - ) - response = client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = data_taxonomy.GetDataAttributeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, data_taxonomy.DataAttribute) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.parent_id == 'parent_id_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - - -def test_get_data_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = data_taxonomy.GetDataAttributeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == data_taxonomy.GetDataAttributeRequest( - name='name_value', - ) - -def test_get_data_attribute_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc - request = {} - client.get_data_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_data_attribute in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_data_attribute] = mock_rpc
-
-        request = {}
-        await client.get_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_async(transport: str = 'grpc_asyncio', request_type=data_taxonomy.GetDataAttributeRequest):
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_attribute),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            parent_id='parent_id_value',
-            attribute_count=1628,
-            etag='etag_value',
-        ))
-        response = await client.get_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = data_taxonomy.GetDataAttributeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, data_taxonomy.DataAttribute)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.parent_id == 'parent_id_value'
-    assert response.attribute_count == 1628
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_async_from_dict():
-    await test_get_data_attribute_async(request_type=dict)
-
-def test_get_data_attribute_field_headers():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataAttributeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute),
- '__call__') as call:
- call.return_value = data_taxonomy.DataAttribute()
- client.get_data_attribute(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_field_headers_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = data_taxonomy.GetDataAttributeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
- await client.get_data_attribute(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_data_attribute_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = data_taxonomy.DataAttribute()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_data_attribute(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_attribute_flattened_error():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_attribute(
- data_taxonomy.GetDataAttributeRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_attribute_flattened_async():
- client = DataTaxonomyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_attribute),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
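- # The flattened kwarg is folded into a request message, roughly
- # data_taxonomy.GetDataAttributeRequest(name='name_value'), which is what
- # args[0].name inspects below.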
- response = await client.get_data_attribute( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_attribute_flattened_error_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_attribute( - data_taxonomy.GetDataAttributeRequest(), - name='name_value', - ) - - -def test_create_data_taxonomy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_taxonomy] = mock_rpc - - request = {} - client.create_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_data_taxonomy_rest_required_fields(request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["data_taxonomy_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "dataTaxonomyId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_taxonomy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "dataTaxonomyId" in jsonified_request - assert jsonified_request["dataTaxonomyId"] == request_init["data_taxonomy_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["dataTaxonomyId"] = 'data_taxonomy_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_taxonomy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
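- # Set difference: anything still unset may only be a query/body param
- # ("data_taxonomy_id", "validate_only"); a path param such as "parent"
- # must never show up here.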
- assert not set(unset_fields) - set(("data_taxonomy_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "dataTaxonomyId" in jsonified_request - assert jsonified_request["dataTaxonomyId"] == 'data_taxonomy_id_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_data_taxonomy(request) - - expected_params = [ - ( - "dataTaxonomyId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_data_taxonomy_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_data_taxonomy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("dataTaxonomyId", "validateOnly", )) & set(("parent", "dataTaxonomyId", "dataTaxonomy", ))) - - -def test_create_data_taxonomy_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
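- # (CreateDataTaxonomy is long-running, so the REST layer responds with a
- # longrunning Operation; 'operations/spam' is only a placeholder name.)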
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_taxonomy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataTaxonomies" % client.transport._host, args[1]) - - -def test_create_data_taxonomy_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_taxonomy( - gcd_data_taxonomy.CreateDataTaxonomyRequest(), - parent='parent_value', - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - data_taxonomy_id='data_taxonomy_id_value', - ) - - -def test_update_data_taxonomy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_taxonomy] = mock_rpc - - request = {} - client.update_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_data_taxonomy_rest_required_fields(request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_taxonomy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_taxonomy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
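- # The faked transcode() result below mirrors the real helper's shape: a
- # dict with 'uri', 'method' and 'query_params', plus a 'body' entry for
- # verbs (like this PATCH) that carry one.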
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_data_taxonomy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_data_taxonomy_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_data_taxonomy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataTaxonomy", ))) - - -def test_update_data_taxonomy_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_data_taxonomy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{data_taxonomy.name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) - - -def test_update_data_taxonomy_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
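- # Supplying a request proto positionally and flattened keyword fields at
- # the same time is ambiguous, so the client raises ValueError before any
- # HTTP request is made.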
- with pytest.raises(ValueError): - client.update_data_taxonomy( - gcd_data_taxonomy.UpdateDataTaxonomyRequest(), - data_taxonomy=gcd_data_taxonomy.DataTaxonomy(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_data_taxonomy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_taxonomy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_taxonomy] = mock_rpc - - request = {} - client.delete_data_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_data_taxonomy_rest_required_fields(request_type=data_taxonomy.DeleteDataTaxonomyRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_taxonomy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_taxonomy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
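- # Note that DELETE carries no request body, so the faked transcode result
- # below omits the 'body' key, unlike the create/update variants above.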
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_data_taxonomy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_data_taxonomy_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_data_taxonomy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_data_taxonomy_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_data_taxonomy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) - - -def test_delete_data_taxonomy_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_data_taxonomy( - data_taxonomy.DeleteDataTaxonomyRequest(), - name='name_value', - ) - - -def test_list_data_taxonomies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_taxonomies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_taxonomies] = mock_rpc - - request = {} - client.list_data_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_taxonomies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_data_taxonomies_rest_required_fields(request_type=data_taxonomy.ListDataTaxonomiesRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_taxonomies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_taxonomies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.ListDataTaxonomiesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
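- # For List methods only "parent" is required; filter/orderBy/pageSize/
- # pageToken are optional query params, matching the set check above.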
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_data_taxonomies(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_data_taxonomies_rest_unset_required_fields():
- transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_data_taxonomies._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_taxonomies_rest_flattened():
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = data_taxonomy.ListDataTaxonomiesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_data_taxonomies(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataTaxonomies" % client.transport._host, args[1])
-
-
-def test_list_data_taxonomies_rest_flattened_error(transport: str = 'rest'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_data_taxonomies(
- data_taxonomy.ListDataTaxonomiesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_data_taxonomies_rest_pager(transport: str = 'rest'):
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
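- # The pager issues one HTTP request per page and follows next_page_token
- # until it is empty: pages of 3, 0, 1 and 2 items below should yield six
- # DataTaxonomy results in total.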
- # Set the response as a series of pages
- response = (
- data_taxonomy.ListDataTaxonomiesResponse(
- data_taxonomies=[
- data_taxonomy.DataTaxonomy(),
- data_taxonomy.DataTaxonomy(),
- data_taxonomy.DataTaxonomy(),
- ],
- next_page_token='abc',
- ),
- data_taxonomy.ListDataTaxonomiesResponse(
- data_taxonomies=[],
- next_page_token='def',
- ),
- data_taxonomy.ListDataTaxonomiesResponse(
- data_taxonomies=[
- data_taxonomy.DataTaxonomy(),
- ],
- next_page_token='ghi',
- ),
- data_taxonomy.ListDataTaxonomiesResponse(
- data_taxonomies=[
- data_taxonomy.DataTaxonomy(),
- data_taxonomy.DataTaxonomy(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(data_taxonomy.ListDataTaxonomiesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_data_taxonomies(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, data_taxonomy.DataTaxonomy)
- for i in results)
-
- pages = list(client.list_data_taxonomies(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_data_taxonomy_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DataTaxonomyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_data_taxonomy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_data_taxonomy] = mock_rpc
-
- request = {}
- client.get_data_taxonomy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_data_taxonomy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_data_taxonomy_rest_required_fields(request_type=data_taxonomy.GetDataTaxonomyRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_taxonomy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_taxonomy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.DataTaxonomy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.DataTaxonomy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_data_taxonomy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_data_taxonomy_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_data_taxonomy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_data_taxonomy_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
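- # (Unlike the mutating methods above, GetDataTaxonomy returns the resource
- # itself rather than an Operation, so the fake response is a DataTaxonomy
- # message serialized to JSON.)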
- return_value = data_taxonomy.DataTaxonomy() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = data_taxonomy.DataTaxonomy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_data_taxonomy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*}" % client.transport._host, args[1]) - - -def test_get_data_taxonomy_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_taxonomy( - data_taxonomy.GetDataTaxonomyRequest(), - name='name_value', - ) - - -def test_create_data_attribute_binding_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_attribute_binding] = mock_rpc - - request = {} - client.create_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.CreateDataAttributeBindingRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["data_attribute_binding_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "dataAttributeBindingId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute_binding._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "dataAttributeBindingId" in jsonified_request - assert jsonified_request["dataAttributeBindingId"] == request_init["data_attribute_binding_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["dataAttributeBindingId"] = 'data_attribute_binding_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute_binding._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("data_attribute_binding_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "dataAttributeBindingId" in jsonified_request - assert jsonified_request["dataAttributeBindingId"] == 'data_attribute_binding_id_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_data_attribute_binding(request) - - expected_params = [ - ( - "dataAttributeBindingId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_data_attribute_binding_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_data_attribute_binding._get_unset_required_fields({}) - assert set(unset_fields) == (set(("dataAttributeBindingId", "validateOnly", )) & set(("parent", "dataAttributeBindingId", "dataAttributeBinding", ))) - - -def test_create_data_attribute_binding_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_attribute_binding(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataAttributeBindings" % client.transport._host, args[1]) - - -def test_create_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_data_attribute_binding( - data_taxonomy.CreateDataAttributeBindingRequest(), - parent='parent_value', - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - data_attribute_binding_id='data_attribute_binding_id_value', - ) - - -def test_update_data_attribute_binding_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_attribute_binding] = mock_rpc - - request = {} - client.update_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.UpdateDataAttributeBindingRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute_binding._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute_binding._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_data_attribute_binding(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_data_attribute_binding_rest_unset_required_fields(): - transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_data_attribute_binding._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataAttributeBinding", ))) - - -def test_update_data_attribute_binding_rest_flattened(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_data_attribute_binding(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{data_attribute_binding.name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1]) - - -def test_update_data_attribute_binding_rest_flattened_error(transport: str = 'rest'): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_data_attribute_binding( - data_taxonomy.UpdateDataAttributeBindingRequest(), - data_attribute_binding=data_taxonomy.DataAttributeBinding(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_data_attribute_binding_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_attribute_binding in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_attribute_binding] = mock_rpc - - request = {} - client.delete_data_attribute_binding(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_data_attribute_binding(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.DeleteDataAttributeBindingRequest): - transport_class = transports.DataTaxonomyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["etag"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "etag" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute_binding._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "etag" in jsonified_request - assert jsonified_request["etag"] == request_init["etag"] - - jsonified_request["name"] = 'name_value' - jsonified_request["etag"] = 'etag_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute_binding._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "etag" in jsonified_request - assert jsonified_request["etag"] == 'etag_value' - - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
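- # (DeleteDataAttributeBinding treats etag as a required query parameter,
- # so expected_params below checks that even its default empty value is
- # sent alongside '$alt'.)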
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_data_attribute_binding(request)
-
-            expected_params = [
-                (
-                    "etag",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_data_attribute_binding_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_data_attribute_binding._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", "etag", )))
-
-
-def test_delete_data_attribute_binding_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_data_attribute_binding(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1])
-
-
-def test_delete_data_attribute_binding_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_attribute_binding(
-            data_taxonomy.DeleteDataAttributeBindingRequest(),
-            name='name_value',
-        )
-
-
-def test_list_data_attribute_bindings_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_attribute_bindings in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_attribute_bindings] = mock_rpc
-
-        request = {}
-        client.list_data_attribute_bindings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_attribute_bindings(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_data_attribute_bindings_rest_required_fields(request_type=data_taxonomy.ListDataAttributeBindingsRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attribute_bindings._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attribute_bindings._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = data_taxonomy.ListDataAttributeBindingsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_data_attribute_bindings(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_data_attribute_bindings_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_data_attribute_bindings._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_attribute_bindings_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = data_taxonomy.ListDataAttributeBindingsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_data_attribute_bindings(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataAttributeBindings" % client.transport._host, args[1])
-
-
-def test_list_data_attribute_bindings_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_attribute_bindings(
-            data_taxonomy.ListDataAttributeBindingsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_attribute_bindings_rest_pager(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
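-    # Each mocked HTTP response below stands in for one page of results; the
-    # pager is expected to fetch the next page whenever next_page_token is set.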
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            data_taxonomy.ListDataAttributeBindingsResponse(
-                data_attribute_bindings=[
-                    data_taxonomy.DataAttributeBinding(),
-                    data_taxonomy.DataAttributeBinding(),
-                    data_taxonomy.DataAttributeBinding(),
-                ],
-                next_page_token='abc',
-            ),
-            data_taxonomy.ListDataAttributeBindingsResponse(
-                data_attribute_bindings=[],
-                next_page_token='def',
-            ),
-            data_taxonomy.ListDataAttributeBindingsResponse(
-                data_attribute_bindings=[
-                    data_taxonomy.DataAttributeBinding(),
-                ],
-                next_page_token='ghi',
-            ),
-            data_taxonomy.ListDataAttributeBindingsResponse(
-                data_attribute_bindings=[
-                    data_taxonomy.DataAttributeBinding(),
-                    data_taxonomy.DataAttributeBinding(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(data_taxonomy.ListDataAttributeBindingsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_data_attribute_bindings(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, data_taxonomy.DataAttributeBinding)
-                   for i in results)
-
-        pages = list(client.list_data_attribute_bindings(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_data_attribute_binding_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_attribute_binding in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_attribute_binding] = mock_rpc
-
-        request = {}
-        client.get_data_attribute_binding(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_data_attribute_binding(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_data_attribute_binding_rest_required_fields(request_type=data_taxonomy.GetDataAttributeBindingRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute_binding._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute_binding._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = data_taxonomy.DataAttributeBinding()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = data_taxonomy.DataAttributeBinding.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_data_attribute_binding(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_data_attribute_binding_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_data_attribute_binding._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_data_attribute_binding_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
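-    # The flattened keyword arguments below should be folded into a single
-    # request message before the HTTP call is made against the templated URL.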
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = data_taxonomy.DataAttributeBinding()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = data_taxonomy.DataAttributeBinding.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_data_attribute_binding(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataAttributeBindings/*}" % client.transport._host, args[1])
-
-
-def test_get_data_attribute_binding_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_attribute_binding(
-            data_taxonomy.GetDataAttributeBindingRequest(),
-            name='name_value',
-        )
-
-
-def test_create_data_attribute_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_data_attribute in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_data_attribute] = mock_rpc
-
-        request = {}
-        client.create_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_data_attribute_rest_required_fields(request_type=data_taxonomy.CreateDataAttributeRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["data_attribute_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "dataAttributeId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "dataAttributeId" in jsonified_request
-    assert jsonified_request["dataAttributeId"] == request_init["data_attribute_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["dataAttributeId"] = 'data_attribute_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_attribute._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("data_attribute_id", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "dataAttributeId" in jsonified_request
-    assert jsonified_request["dataAttributeId"] == 'data_attribute_id_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
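-            # Unlike the GET methods above, create is transcoded to a POST, so
-            # a request body is populated below in addition to query_params.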
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_data_attribute(request)
-
-            expected_params = [
-                (
-                    "dataAttributeId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_data_attribute_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.create_data_attribute._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("dataAttributeId", "validateOnly", )) & set(("parent", "dataAttributeId", "dataAttribute", )))
-
-
-def test_create_data_attribute_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
-            data_attribute_id='data_attribute_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_data_attribute(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes" % client.transport._host, args[1])
-
-
-def test_create_data_attribute_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_data_attribute(
-            data_taxonomy.CreateDataAttributeRequest(),
-            parent='parent_value',
-            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
-            data_attribute_id='data_attribute_id_value',
-        )
-
-
-def test_update_data_attribute_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_data_attribute in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_data_attribute] = mock_rpc
-
-        request = {}
-        client.update_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_data_attribute_rest_required_fields(request_type=data_taxonomy.UpdateDataAttributeRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_attribute._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
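-            # Update is transcoded to PATCH; update_mask and validate_only are
-            # expected to travel as query parameters rather than in the body.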
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_data_attribute(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_data_attribute_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.update_data_attribute._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "dataAttribute", )))
-
-
-def test_update_data_attribute_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_data_attribute(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{data_attribute.name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1])
-
-
-def test_update_data_attribute_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_data_attribute(
-            data_taxonomy.UpdateDataAttributeRequest(),
-            data_attribute=data_taxonomy.DataAttribute(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_delete_data_attribute_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_data_attribute in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_data_attribute] = mock_rpc
-
-        request = {}
-        client.delete_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_data_attribute_rest_required_fields(request_type=data_taxonomy.DeleteDataAttributeRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_attribute._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("etag", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_data_attribute(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_data_attribute_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_data_attribute._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
-
-
-def test_delete_data_attribute_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_data_attribute(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1])
-
-
-def test_delete_data_attribute_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_attribute(
-            data_taxonomy.DeleteDataAttributeRequest(),
-            name='name_value',
-        )
-
-
-def test_list_data_attributes_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_attributes in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_attributes] = mock_rpc
-
-        request = {}
-        client.list_data_attributes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_attributes(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_data_attributes_rest_required_fields(request_type=data_taxonomy.ListDataAttributesRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attributes._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_attributes._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = data_taxonomy.ListDataAttributesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
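-            # List is a plain GET: paging and filter options become query
-            # parameters, and no request body is attached.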
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_data_attributes(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_data_attributes_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_data_attributes._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_attributes_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = data_taxonomy.ListDataAttributesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_data_attributes(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/dataTaxonomies/*}/attributes" % client.transport._host, args[1])
-
-
-def test_list_data_attributes_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_attributes(
-            data_taxonomy.ListDataAttributesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_attributes_rest_pager(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            data_taxonomy.ListDataAttributesResponse(
-                data_attributes=[
-                    data_taxonomy.DataAttribute(),
-                    data_taxonomy.DataAttribute(),
-                    data_taxonomy.DataAttribute(),
-                ],
-                next_page_token='abc',
-            ),
-            data_taxonomy.ListDataAttributesResponse(
-                data_attributes=[],
-                next_page_token='def',
-            ),
-            data_taxonomy.ListDataAttributesResponse(
-                data_attributes=[
-                    data_taxonomy.DataAttribute(),
-                ],
-                next_page_token='ghi',
-            ),
-            data_taxonomy.ListDataAttributesResponse(
-                data_attributes=[
-                    data_taxonomy.DataAttribute(),
-                    data_taxonomy.DataAttribute(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(data_taxonomy.ListDataAttributesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}
-
-        pager = client.list_data_attributes(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, data_taxonomy.DataAttribute)
-                   for i in results)
-
-        pages = list(client.list_data_attributes(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_data_attribute_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_attribute in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_attribute] = mock_rpc
-
-        request = {}
-        client.get_data_attribute(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_data_attribute(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_data_attribute_rest_required_fields(request_type=data_taxonomy.GetDataAttributeRequest):
-    transport_class = transports.DataTaxonomyServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_attribute._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = data_taxonomy.DataAttribute()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = data_taxonomy.DataAttribute.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_data_attribute(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_data_attribute_rest_unset_required_fields():
-    transport = transports.DataTaxonomyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_data_attribute._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_data_attribute_rest_flattened():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = data_taxonomy.DataAttribute()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = data_taxonomy.DataAttribute.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_data_attribute(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataTaxonomies/*/attributes/*}" % client.transport._host, args[1])
-
-
-def test_get_data_attribute_rest_flattened_error(transport: str = 'rest'):
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_attribute(
-            data_taxonomy.GetDataAttributeRequest(),
-            name='name_value',
-        )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataTaxonomyServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = DataTaxonomyServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = DataTaxonomyServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DataTaxonomyServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
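-    # When a transport is injected, the client adopts it as-is; credentials,
-    # credentials_file, scopes, and api_key must not also be supplied (see
-    # test_credentials_transport_error above).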
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = DataTaxonomyServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.DataTaxonomyServiceGrpcTransport,
-    transports.DataTaxonomyServiceGrpcAsyncIOTransport,
-    transports.DataTaxonomyServiceRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_kind_grpc():
-    transport = DataTaxonomyServiceClient.get_transport_class("grpc")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "grpc"
-
-
-def test_initialize_client_w_grpc():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_data_taxonomy_empty_call_grpc():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_taxonomy),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_data_taxonomy(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_data_taxonomy_empty_call_grpc():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_taxonomy),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.update_data_taxonomy(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_data_taxonomy_empty_call_grpc():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
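-    # Passing request=None should coerce to a default-constructed request
-    # message, which the assertion at the end of this test verifies.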
- with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_taxonomies_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - call.return_value = data_taxonomy.ListDataTaxonomiesResponse() - client.list_data_taxonomies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataTaxonomiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_taxonomy_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - call.return_value = data_taxonomy.DataTaxonomy() - client.get_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_attribute_bindings_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - client.list_data_attribute_bindings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributeBindingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_binding_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - call.return_value = data_taxonomy.DataAttributeBinding() - client.get_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
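The *_empty_call_grpc tests in this hunk all instantiate one template: patch the transport's bound stub method, invoke the client with request=None and no flattened fields, and assert that the stub still received the default request message. A distilled, self-contained restatement of that template, assuming google-cloud-dataplex and its test dependencies are installed; import paths mirror the generated test module, and get_data_taxonomy stands in for any of the wrapped RPCs:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1.services.data_taxonomy_service import (
    DataTaxonomyServiceClient,
)
from google.cloud.dataplex_v1.types import data_taxonomy


def test_empty_call_template_sketch():
    client = DataTaxonomyServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    # Patch the bound stub method so no channel traffic occurs.
    with mock.patch.object(
        type(client.transport.get_data_taxonomy), '__call__'
    ) as call:
        call.return_value = data_taxonomy.DataTaxonomy()
        client.get_data_taxonomy(request=None)

        # With request=None and no flattened fields, the client must still
        # construct and send the default (all-unset) request message.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == data_taxonomy.GetDataTaxonomyRequest()

From method to method, only the patched stub, the faked return type, and the expected default request type vary.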
-def test_update_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_attributes_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - call.return_value = data_taxonomy.ListDataAttributesResponse() - client.list_data_attributes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_empty_call_grpc(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - call.return_value = data_taxonomy.DataAttribute() - client.get_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataTaxonomyServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
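The grpc_asyncio variants that follow differ from the sync template in one respect: the patched stub must return an awaitable, which the generated tests supply via grpc_helpers_async.FakeUnaryUnaryCall. A sketch under the same assumptions plus pytest-asyncio; async_anonymous_credentials below is a stand-in for the helper the generated module defines near its imports:

from unittest import mock

import pytest
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1.services.data_taxonomy_service import (
    DataTaxonomyServiceAsyncClient,
)
from google.cloud.dataplex_v1.types import data_taxonomy


def async_anonymous_credentials():
    # Stand-in for the test module's helper, which falls back to the sync
    # anonymous credentials when google.auth.aio is unavailable.
    return ga_credentials.AnonymousCredentials()


@pytest.mark.asyncio
async def test_empty_call_template_sketch_asyncio():
    client = DataTaxonomyServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(client.transport.get_data_taxonomy), '__call__'
    ) as call:
        # Wrap the faked response so the awaited call has something to resolve.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            data_taxonomy.DataTaxonomy()
        )
        await client.get_data_taxonomy(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == data_taxonomy.GetDataTaxonomyRequest()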
-@pytest.mark.asyncio -async def test_create_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_taxonomies_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_taxonomies(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataTaxonomiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_taxonomy_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - )) - await client.get_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_attribute_bindings_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_attribute_bindings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributeBindingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_attribute_binding_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - )) - await client.get_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_data_attribute(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_attributes_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_data_attributes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_attribute_empty_call_grpc_asyncio(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - )) - await client.get_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DataTaxonomyServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_data_taxonomy_rest_bad_request(request_type=gcd_data_taxonomy.CreateDataTaxonomyRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_taxonomy(request) - - -@pytest.mark.parametrize("request_type", [ - gcd_data_taxonomy.CreateDataTaxonomyRequest, - dict, -]) -def test_create_data_taxonomy_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["data_taxonomy"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'attribute_count': 1628, 'etag': 'etag_value', 'class_count': 1182} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcd_data_taxonomy.CreateDataTaxonomyRequest.meta.fields["data_taxonomy"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
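# Note on the branch that follows: raw protobuf message classes expose a
# DESCRIPTOR attribute, while proto-plus wrappers do not and list their
# fields via .meta.fields instead. Checking hasattr(field.message,
# "DESCRIPTOR") therefore routes each message type to whichever field
# listing its runtime representation actually provides, which is what lets
# the test prune sample-request subfields missing from the installed
# dependency version.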
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_taxonomy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_taxonomy"][field])): - del request_init["data_taxonomy"][field][i][subfield] - else: - del request_init["data_taxonomy"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_taxonomy(request) - - # Establish that the response is the type that we expect. 
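# Note on the assertion below: a long-running method returns an operation
# future rather than a materialized message, so the "response is the type
# that we expect" step only round-trips the faked Operation through
# MessageToJson. Field-level assertions appear instead in the non-LRO
# tests further on (e.g. the get and list call_success tests).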
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_taxonomy_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_taxonomy") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_taxonomy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_taxonomy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcd_data_taxonomy.CreateDataTaxonomyRequest.pb(gcd_data_taxonomy.CreateDataTaxonomyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcd_data_taxonomy.CreateDataTaxonomyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_data_taxonomy_rest_bad_request(request_type=gcd_data_taxonomy.UpdateDataTaxonomyRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_taxonomy(request) - - -@pytest.mark.parametrize("request_type", [ - gcd_data_taxonomy.UpdateDataTaxonomyRequest, - dict, -]) -def test_update_data_taxonomy_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'data_taxonomy': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'}} - request_init["data_taxonomy"] = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'attribute_count': 1628, 'etag': 'etag_value', 'class_count': 1182} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcd_data_taxonomy.UpdateDataTaxonomyRequest.meta.fields["data_taxonomy"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_taxonomy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_taxonomy"][field])): - del request_init["data_taxonomy"][field][i][subfield] - else: - del request_init["data_taxonomy"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_taxonomy(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_taxonomy_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_taxonomy") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_taxonomy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_taxonomy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcd_data_taxonomy.UpdateDataTaxonomyRequest.pb(gcd_data_taxonomy.UpdateDataTaxonomyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_data_taxonomy_rest_bad_request(request_type=data_taxonomy.DeleteDataTaxonomyRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_taxonomy(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataTaxonomyRequest, - dict, -]) -def test_delete_data_taxonomy_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_taxonomy(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_taxonomy_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_taxonomy") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_taxonomy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_taxonomy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.DeleteDataTaxonomyRequest.pb(data_taxonomy.DeleteDataTaxonomyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.DeleteDataTaxonomyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), 
metadata - - client.delete_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_taxonomies_rest_bad_request(request_type=data_taxonomy.ListDataTaxonomiesRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_taxonomies(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataTaxonomiesRequest, - dict, -]) -def test_list_data_taxonomies_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.ListDataTaxonomiesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.ListDataTaxonomiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_taxonomies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataTaxonomiesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_taxonomies_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_taxonomies") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_taxonomies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_taxonomies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.ListDataTaxonomiesRequest.pb(data_taxonomy.ListDataTaxonomiesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.ListDataTaxonomiesResponse.to_json(data_taxonomy.ListDataTaxonomiesResponse()) - req.return_value.content = return_value - - request = data_taxonomy.ListDataTaxonomiesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.ListDataTaxonomiesResponse() - post_with_metadata.return_value = data_taxonomy.ListDataTaxonomiesResponse(), metadata - - client.list_data_taxonomies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_taxonomy_rest_bad_request(request_type=data_taxonomy.GetDataTaxonomyRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_taxonomy(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataTaxonomyRequest, - dict, -]) -def test_get_data_taxonomy_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.DataTaxonomy( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - attribute_count=1628, - etag='etag_value', - class_count=1182, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.DataTaxonomy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_taxonomy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, data_taxonomy.DataTaxonomy) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - assert response.class_count == 1182 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_taxonomy_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_taxonomy") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_taxonomy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_taxonomy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.GetDataTaxonomyRequest.pb(data_taxonomy.GetDataTaxonomyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.DataTaxonomy.to_json(data_taxonomy.DataTaxonomy()) - req.return_value.content = return_value - - request = data_taxonomy.GetDataTaxonomyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.DataTaxonomy() - post_with_metadata.return_value = data_taxonomy.DataTaxonomy(), metadata - - client.get_data_taxonomy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.CreateDataAttributeBindingRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_attribute_binding(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.CreateDataAttributeBindingRequest, - dict, -]) -def test_create_data_attribute_binding_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["data_attribute_binding"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'resource': 'resource_value', 'attributes': ['attributes_value1', 'attributes_value2'], 'paths': [{'name': 'name_value', 'attributes': ['attributes_value1', 'attributes_value2']}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = data_taxonomy.CreateDataAttributeBindingRequest.meta.fields["data_attribute_binding"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_attribute_binding"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_attribute_binding"][field])): - del request_init["data_attribute_binding"][field][i][subfield] - else: - del request_init["data_attribute_binding"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_attribute_binding(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_attribute_binding_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_binding") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_binding_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_attribute_binding") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.CreateDataAttributeBindingRequest.pb(data_taxonomy.CreateDataAttributeBindingRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.CreateDataAttributeBindingRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.UpdateDataAttributeBindingRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_attribute_binding(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.UpdateDataAttributeBindingRequest, - dict, -]) -def test_update_data_attribute_binding_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'data_attribute_binding': {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'}} - request_init["data_attribute_binding"] = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'etag': 'etag_value', 'resource': 'resource_value', 'attributes': ['attributes_value1', 'attributes_value2'], 'paths': [{'name': 'name_value', 'attributes': ['attributes_value1', 'attributes_value2']}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = data_taxonomy.UpdateDataAttributeBindingRequest.meta.fields["data_attribute_binding"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_attribute_binding"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_attribute_binding"][field])): - del request_init["data_attribute_binding"][field][i][subfield] - else: - del request_init["data_attribute_binding"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_attribute_binding(request) - - # Establish that the response is the type that we expect. 
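The interceptor tests in this file (e.g. test_create_data_attribute_binding_rest_interceptors above) patch concrete hook names on DataTaxonomyServiceRestInterceptor, which also documents how a hand-written interceptor plugs into the REST transport. A minimal sketch follows, with signatures inferred from the mock return values above (`pre_*` hooks hand back the `(request, metadata)` pair, `post_*` hooks hand back the response); the logging bodies are illustrative:

    import logging

    class LoggingInterceptor(transports.DataTaxonomyServiceRestInterceptor):
        def pre_create_data_attribute_binding(self, request, metadata):
            # Runs before the HTTP call; must return (request, metadata).
            logging.debug("outgoing request: %s", request)
            return request, metadata

        def post_create_data_attribute_binding(self, response):
            # Runs after the HTTP call; must return the (possibly modified)
            # response.
            logging.debug("raw operation: %s", response)
            return response

    transport = transports.DataTaxonomyServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = DataTaxonomyServiceClient(transport=transport)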
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_attribute_binding_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_binding") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_binding_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_attribute_binding") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.UpdateDataAttributeBindingRequest.pb(data_taxonomy.UpdateDataAttributeBindingRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.UpdateDataAttributeBindingRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.DeleteDataAttributeBindingRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_attribute_binding(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeBindingRequest, - dict, -]) -def test_delete_data_attribute_binding_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_attribute_binding(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_attribute_binding_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_binding") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_binding_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_attribute_binding") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.DeleteDataAttributeBindingRequest.pb(data_taxonomy.DeleteDataAttributeBindingRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.DeleteDataAttributeBindingRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - 
post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_attribute_bindings_rest_bad_request(request_type=data_taxonomy.ListDataAttributeBindingsRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_attribute_bindings(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataAttributeBindingsRequest, - dict, -]) -def test_list_data_attribute_bindings_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.ListDataAttributeBindingsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.ListDataAttributeBindingsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_attribute_bindings(request) - - # Establish that the response is the type that we expect. 
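Note the extra conversion step in the list test above: json_format only accepts raw protobuf messages, so proto-plus responses are first unwrapped with the generated `.pb()` classmethod before serialization. A self-contained round-trip using the same helpers (module path assumed to match this test file's imports):

    from google.protobuf import json_format
    from google.cloud.dataplex_v1.types import data_taxonomy

    resp = data_taxonomy.ListDataAttributeBindingsResponse(
        next_page_token='next_page_token_value',
    )
    # proto-plus wrapper -> underlying protobuf message
    pb_resp = data_taxonomy.ListDataAttributeBindingsResponse.pb(resp)
    payload = json_format.MessageToJson(pb_resp)
    # ...and back, as the REST transport does when parsing a response body.
    parsed = data_taxonomy.ListDataAttributeBindingsResponse.from_json(payload)
    assert parsed.next_page_token == 'next_page_token_value'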
- assert isinstance(response, pagers.ListDataAttributeBindingsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_attribute_bindings_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attribute_bindings") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attribute_bindings_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_attribute_bindings") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.ListDataAttributeBindingsRequest.pb(data_taxonomy.ListDataAttributeBindingsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.ListDataAttributeBindingsResponse.to_json(data_taxonomy.ListDataAttributeBindingsResponse()) - req.return_value.content = return_value - - request = data_taxonomy.ListDataAttributeBindingsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.ListDataAttributeBindingsResponse() - post_with_metadata.return_value = data_taxonomy.ListDataAttributeBindingsResponse(), metadata - - client.list_data_attribute_bindings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_attribute_binding_rest_bad_request(request_type=data_taxonomy.GetDataAttributeBindingRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_attribute_binding(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeBindingRequest, - dict, -]) -def test_get_data_attribute_binding_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataAttributeBindings/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.DataAttributeBinding( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - etag='etag_value', - attributes=['attributes_value'], - resource='resource_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.DataAttributeBinding.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_attribute_binding(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, data_taxonomy.DataAttributeBinding) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - assert response.attributes == ['attributes_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_attribute_binding_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_binding") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_binding_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_attribute_binding") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.GetDataAttributeBindingRequest.pb(data_taxonomy.GetDataAttributeBindingRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.DataAttributeBinding.to_json(data_taxonomy.DataAttributeBinding()) - req.return_value.content = return_value - - request = data_taxonomy.GetDataAttributeBindingRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.DataAttributeBinding() - post_with_metadata.return_value = data_taxonomy.DataAttributeBinding(), metadata - - client.get_data_attribute_binding(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_data_attribute_rest_bad_request(request_type=data_taxonomy.CreateDataAttributeRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_data_attribute(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.CreateDataAttributeRequest, - dict, -]) -def test_create_data_attribute_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request_init["data_attribute"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'parent_id': 'parent_id_value', 'attribute_count': 1628, 'etag': 'etag_value', 'resource_access_spec': {'readers': ['readers_value1', 'readers_value2'], 'writers': ['writers_value1', 'writers_value2'], 'owners': ['owners_value1', 'owners_value2']}, 'data_access_spec': {'readers': ['readers_value1', 'readers_value2']}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = data_taxonomy.CreateDataAttributeRequest.meta.fields["data_attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_attribute"][field])): - del request_init["data_attribute"][field][i][subfield] - else: - del request_init["data_attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_data_attribute(request) - - # Establish that the response is the type that we expect. 
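The create/update/delete RPCs in this service are long-running, which is why the faked body above is an operations_pb2.Operation and why the interceptor tests patch operation.Operation._set_result_from_operation to keep the returned future inert. Against a live service the future would be resolved explicitly; a usage sketch, assuming real credentials and a valid request:

    lro = client.create_data_attribute(request=request)
    data_attribute = lro.result(timeout=300)  # poll until the operation completes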
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_data_attribute_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_create_data_attribute_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_create_data_attribute") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.CreateDataAttributeRequest.pb(data_taxonomy.CreateDataAttributeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.CreateDataAttributeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_data_attribute_rest_bad_request(request_type=data_taxonomy.UpdateDataAttributeRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_attribute(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.UpdateDataAttributeRequest, - dict, -]) -def test_update_data_attribute_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'data_attribute': {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'}} - request_init["data_attribute"] = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'labels': {}, 'parent_id': 'parent_id_value', 'attribute_count': 1628, 'etag': 'etag_value', 'resource_access_spec': {'readers': ['readers_value1', 'readers_value2'], 'writers': ['writers_value1', 'writers_value2'], 'owners': ['owners_value1', 'owners_value2']}, 'data_access_spec': {'readers': ['readers_value1', 'readers_value2']}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = data_taxonomy.UpdateDataAttributeRequest.meta.fields["data_attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_attribute"][field])): - del request_init["data_attribute"][field][i][subfield] - else: - del request_init["data_attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_attribute(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_attribute_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_update_data_attribute_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_update_data_attribute") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.UpdateDataAttributeRequest.pb(data_taxonomy.UpdateDataAttributeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.UpdateDataAttributeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_data_attribute_rest_bad_request(request_type=data_taxonomy.DeleteDataAttributeRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_attribute(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.DeleteDataAttributeRequest, - dict, -]) -def test_delete_data_attribute_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_attribute(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_attribute_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_delete_data_attribute_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_delete_data_attribute") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.DeleteDataAttributeRequest.pb(data_taxonomy.DeleteDataAttributeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = data_taxonomy.DeleteDataAttributeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = 
operations_pb2.Operation(), metadata - - client.delete_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_attributes_rest_bad_request(request_type=data_taxonomy.ListDataAttributesRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_attributes(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.ListDataAttributesRequest, - dict, -]) -def test_list_data_attributes_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/dataTaxonomies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.ListDataAttributesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.ListDataAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_attributes(request) - - # Establish that the response is the type that we expect. 
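The assertions that follow check only the pager type and first-page fields, but the pager's job is transparent pagination: iterating it re-issues the request with each returned next_page_token until the token is empty. A usage sketch, assuming a live service and illustrative resource names:

    pager = client.list_data_attributes(
        request={"parent": "projects/p/locations/l/dataTaxonomies/t"}
    )
    for attribute in pager:  # fetches further pages on demand
        print(attribute.name)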
- assert isinstance(response, pagers.ListDataAttributesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_attributes_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attributes") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_list_data_attributes_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_list_data_attributes") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.ListDataAttributesRequest.pb(data_taxonomy.ListDataAttributesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.ListDataAttributesResponse.to_json(data_taxonomy.ListDataAttributesResponse()) - req.return_value.content = return_value - - request = data_taxonomy.ListDataAttributesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.ListDataAttributesResponse() - post_with_metadata.return_value = data_taxonomy.ListDataAttributesResponse(), metadata - - client.list_data_attributes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_attribute_rest_bad_request(request_type=data_taxonomy.GetDataAttributeRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_attribute(request) - - -@pytest.mark.parametrize("request_type", [ - data_taxonomy.GetDataAttributeRequest, - dict, -]) -def test_get_data_attribute_rest_call_success(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataTaxonomies/sample3/attributes/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = data_taxonomy.DataAttribute( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - parent_id='parent_id_value', - attribute_count=1628, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = data_taxonomy.DataAttribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, data_taxonomy.DataAttribute) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.parent_id == 'parent_id_value' - assert response.attribute_count == 1628 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_attribute_rest_interceptors(null_interceptor): - transport = transports.DataTaxonomyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTaxonomyServiceRestInterceptor(), - ) - client = DataTaxonomyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute") as post, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "post_get_data_attribute_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTaxonomyServiceRestInterceptor, "pre_get_data_attribute") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = data_taxonomy.GetDataAttributeRequest.pb(data_taxonomy.GetDataAttributeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = data_taxonomy.DataAttribute.to_json(data_taxonomy.DataAttribute()) - req.return_value.content = return_value - - request = data_taxonomy.GetDataAttributeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = data_taxonomy.DataAttribute() - post_with_metadata.return_value = data_taxonomy.DataAttribute(), metadata - - client.get_data_attribute(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
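get_location and list_locations, like the operations calls that follow, come from standard service mixins rather than the Dataplex surface, and they take the raw *_pb2 request types; that is why these tests build requests via json_format.ParseDict instead of proto-plus constructors. A usage sketch, assuming real credentials and an illustrative location name:

    from google.cloud.location import locations_pb2

    location = client.get_location(
        request=locations_pb2.GetLocationRequest(
            name="projects/p/locations/us-central1",
        )
    )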
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
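The REST mixin tests in this stretch all drive one mocking pattern: patch requests.Session.request, hand back a fake Response, and assert on what the client surfaces. A condensed sketch of that pattern follows; the _fake_response helper is illustrative only and is not part of the generated suite:

    from unittest import mock

    def _fake_response(status_code, body=b'{}'):
        # Mimic the attributes the REST transport reads from a
        # requests.Response: status code, serialized body, headers.
        response = mock.Mock()
        response.status_code = status_code
        response.content = body
        response.headers = {"header-1": "value-1"}
        return response

A 200 whose body is json_format.MessageToJson(msg) parses back into the expected message; a 400 is surfaced by google.api_core as core_exceptions.BadRequest, which is what the *_bad_request tests assert.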
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_taxonomy_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_taxonomy), - '__call__') as call: - client.create_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.CreateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_taxonomy_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_taxonomy), - '__call__') as call: - client.update_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcd_data_taxonomy.UpdateDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_data_taxonomy_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_taxonomy), - '__call__') as call: - client.delete_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_taxonomies_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_taxonomies), - '__call__') as call: - client.list_data_taxonomies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataTaxonomiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_taxonomy_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_taxonomy), - '__call__') as call: - client.get_data_taxonomy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataTaxonomyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_binding_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute_binding), - '__call__') as call: - client.create_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_attribute_binding_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute_binding), - '__call__') as call: - client.update_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_data_attribute_binding_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute_binding), - '__call__') as call: - client.delete_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_attribute_bindings_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attribute_bindings), - '__call__') as call: - client.list_data_attribute_bindings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributeBindingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_binding_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute_binding), - '__call__') as call: - client.get_data_attribute_binding(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeBindingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_attribute_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_attribute), - '__call__') as call: - client.create_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.CreateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_attribute_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_attribute), - '__call__') as call: - client.update_data_attribute(request=None) - - # Establish that the underlying stub method was called. 
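Each *_empty_call_rest test in this run repeats the same four steps, so the whole family could be expressed as a single helper. A sketch under that assumption; _assert_empty_call is hypothetical and not part of the generated code:

    from unittest import mock

    def _assert_empty_call(client, method_name, request_cls):
        # Patch the transport-level callable, invoke the client with
        # request=None, and verify that a default (empty) request
        # message was constructed.
        with mock.patch.object(
                type(getattr(client.transport, method_name)), '__call__') as call:
            getattr(client, method_name)(request=None)
            call.assert_called()
            _, args, _ = call.mock_calls[0]
            assert args[0] == request_cls()

For example, _assert_empty_call(client, 'get_data_taxonomy', data_taxonomy.GetDataTaxonomyRequest) covers the same ground as the hand-rolled test above.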
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.UpdateDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_attribute_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_attribute), - '__call__') as call: - client.delete_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.DeleteDataAttributeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_attributes_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_attributes), - '__call__') as call: - client.list_data_attributes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.ListDataAttributesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_attribute_empty_call_rest(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_attribute), - '__call__') as call: - client.get_data_attribute(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = data_taxonomy.GetDataAttributeRequest() - - assert args[0] == request_msg - - -def test_data_taxonomy_service_rest_lro_client(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataTaxonomyServiceGrpcTransport, - ) - -def test_data_taxonomy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataTaxonomyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_taxonomy_service_base_transport(): - # Instantiate the base transport. 
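The base-transport test that continues below patches __init__ only to skip credential plumbing; the property being verified is that every RPC attribute on the abstract DataTaxonomyServiceTransport raises NotImplementedError until a concrete gRPC or REST subclass overrides it. A standalone sketch, assuming the base class can also be constructed directly with anonymous credentials (the test itself patches __init__ instead):

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.dataplex_v1.services.data_taxonomy_service import transports

    transport = transports.DataTaxonomyServiceTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(NotImplementedError):
        # Accessing the RPC attribute and calling it hits the abstract stub.
        transport.create_data_taxonomy(request=object())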
- with mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataTaxonomyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_data_taxonomy', - 'update_data_taxonomy', - 'delete_data_taxonomy', - 'list_data_taxonomies', - 'get_data_taxonomy', - 'create_data_attribute_binding', - 'update_data_attribute_binding', - 'delete_data_attribute_binding', - 'list_data_attribute_bindings', - 'get_data_attribute_binding', - 'create_data_attribute', - 'update_data_attribute', - 'delete_data_attribute', - 'list_data_attributes', - 'get_data_attribute', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_taxonomy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataTaxonomyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_taxonomy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.data_taxonomy_service.transports.DataTaxonomyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataTaxonomyServiceTransport() - adc.assert_called_once() - - -def test_data_taxonomy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataTaxonomyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceGrpcAsyncIOTransport, - ], -) -def test_data_taxonomy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataTaxonomyServiceGrpcTransport, - transports.DataTaxonomyServiceGrpcAsyncIOTransport, - transports.DataTaxonomyServiceRestTransport, - ], -) -def test_data_taxonomy_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataTaxonomyServiceGrpcTransport, grpc_helpers), - (transports.DataTaxonomyServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_taxonomy_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) -def test_data_taxonomy_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
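The ADC tests above pin down how credentials are resolved: absent explicit credentials, the client falls back to google.auth.default with the cloud-platform scope. The same resolution can be reproduced outside the suite; a sketch, with the scope taken from the assertions above:

    import google.auth

    # Resolve Application Default Credentials the way the client would.
    credentials, project = google.auth.default(
        scopes=['https://www.googleapis.com/auth/cloud-platform'],
    )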
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_data_taxonomy_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DataTaxonomyServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_taxonomy_service_host_no_port(transport_name): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_taxonomy_service_host_with_port(transport_name): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_data_taxonomy_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DataTaxonomyServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DataTaxonomyServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_data_taxonomy._session - session2 = client2.transport.create_data_taxonomy._session - assert session1 != session2 - session1 = client1.transport.update_data_taxonomy._session - session2 = client2.transport.update_data_taxonomy._session - assert session1 != session2 - session1 = client1.transport.delete_data_taxonomy._session - session2 = client2.transport.delete_data_taxonomy._session - assert session1 != session2 - session1 = client1.transport.list_data_taxonomies._session - 
session2 = client2.transport.list_data_taxonomies._session
-    assert session1 != session2
-    session1 = client1.transport.get_data_taxonomy._session
-    session2 = client2.transport.get_data_taxonomy._session
-    assert session1 != session2
-    session1 = client1.transport.create_data_attribute_binding._session
-    session2 = client2.transport.create_data_attribute_binding._session
-    assert session1 != session2
-    session1 = client1.transport.update_data_attribute_binding._session
-    session2 = client2.transport.update_data_attribute_binding._session
-    assert session1 != session2
-    session1 = client1.transport.delete_data_attribute_binding._session
-    session2 = client2.transport.delete_data_attribute_binding._session
-    assert session1 != session2
-    session1 = client1.transport.list_data_attribute_bindings._session
-    session2 = client2.transport.list_data_attribute_bindings._session
-    assert session1 != session2
-    session1 = client1.transport.get_data_attribute_binding._session
-    session2 = client2.transport.get_data_attribute_binding._session
-    assert session1 != session2
-    session1 = client1.transport.create_data_attribute._session
-    session2 = client2.transport.create_data_attribute._session
-    assert session1 != session2
-    session1 = client1.transport.update_data_attribute._session
-    session2 = client2.transport.update_data_attribute._session
-    assert session1 != session2
-    session1 = client1.transport.delete_data_attribute._session
-    session2 = client2.transport.delete_data_attribute._session
-    assert session1 != session2
-    session1 = client1.transport.list_data_attributes._session
-    session2 = client2.transport.list_data_attributes._session
-    assert session1 != session2
-    session1 = client1.transport.get_data_attribute._session
-    session2 = client2.transport.get_data_attribute._session
-    assert session1 != session2
-
-
-def test_data_taxonomy_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTaxonomyServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_data_taxonomy_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTaxonomyServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) -def test_data_taxonomy_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataTaxonomyServiceGrpcTransport, transports.DataTaxonomyServiceGrpcAsyncIOTransport]) -def test_data_taxonomy_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_taxonomy_service_grpc_lro_client(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
-    assert transport.operations_client is transport.operations_client
-
-
-def test_data_taxonomy_service_grpc_lro_async_client():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_data_attribute_path():
-    project = "squid"
-    location = "clam"
-    dataTaxonomy = "whelk"
-    data_attribute_id = "octopus"
-    expected = "projects/{project}/locations/{location}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}".format(project=project, location=location, dataTaxonomy=dataTaxonomy, data_attribute_id=data_attribute_id, )
-    actual = DataTaxonomyServiceClient.data_attribute_path(project, location, dataTaxonomy, data_attribute_id)
-    assert expected == actual
-
-
-def test_parse_data_attribute_path():
-    expected = {
-        "project": "oyster",
-        "location": "nudibranch",
-        "dataTaxonomy": "cuttlefish",
-        "data_attribute_id": "mussel",
-    }
-    path = DataTaxonomyServiceClient.data_attribute_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataTaxonomyServiceClient.parse_data_attribute_path(path)
-    assert expected == actual
-
-def test_data_attribute_binding_path():
-    project = "winkle"
-    location = "nautilus"
-    data_attribute_binding_id = "scallop"
-    expected = "projects/{project}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}".format(project=project, location=location, data_attribute_binding_id=data_attribute_binding_id, )
-    actual = DataTaxonomyServiceClient.data_attribute_binding_path(project, location, data_attribute_binding_id)
-    assert expected == actual
-
-
-def test_parse_data_attribute_binding_path():
-    expected = {
-        "project": "abalone",
-        "location": "squid",
-        "data_attribute_binding_id": "clam",
-    }
-    path = DataTaxonomyServiceClient.data_attribute_binding_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = DataTaxonomyServiceClient.parse_data_attribute_binding_path(path)
-    assert expected == actual
-
-def test_data_taxonomy_path():
-    project = "whelk"
-    location = "octopus"
-    data_taxonomy_id = "oyster"
-    expected = "projects/{project}/locations/{location}/dataTaxonomies/{data_taxonomy_id}".format(project=project, location=location, data_taxonomy_id=data_taxonomy_id, )
-    actual = DataTaxonomyServiceClient.data_taxonomy_path(project, location, data_taxonomy_id)
-    assert expected == actual
-
-
-def test_parse_data_taxonomy_path():
-    expected = {
-        "project": "nudibranch",
-        "location": "cuttlefish",
-        "data_taxonomy_id": "mussel",
-    }
-    path = DataTaxonomyServiceClient.data_taxonomy_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = DataTaxonomyServiceClient.parse_data_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataTaxonomyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DataTaxonomyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataTaxonomyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DataTaxonomyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataTaxonomyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DataTaxonomyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DataTaxonomyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DataTaxonomyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataTaxonomyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataTaxonomyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DataTaxonomyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
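Every *_path/parse_*_path pair in this stretch exercises a straight format/parse round-trip over a resource template. Concretely, using the common-location template asserted above (values arbitrary; DataTaxonomyServiceClient as imported by the suite):

    path = DataTaxonomyServiceClient.common_location_path('my-project', 'us-central1')
    assert path == 'projects/my-project/locations/us-central1'
    assert DataTaxonomyServiceClient.parse_common_location_path(path) == {
        'project': 'my-project',
        'location': 'us-central1',
    }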
- actual = DataTaxonomyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataTaxonomyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataTaxonomyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
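In the async variants a mocked stub cannot return a bare message, because the client awaits the call object; grpc_helpers_async.FakeUnaryUnaryCall wraps a value so that awaiting it yields that value. A minimal sketch of the same trick in isolation:

    import asyncio
    from google.api_core import grpc_helpers_async
    from google.longrunning import operations_pb2

    async def _demo():
        # Awaiting the fake call resolves to the wrapped response message.
        fake_call = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation())
        response = await fake_call
        assert isinstance(response, operations_pb2.Operation)

    asyncio.run(_demo())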
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataTaxonomyServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataTaxonomyServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = DataTaxonomyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_transport_close_rest():
-    client = DataTaxonomyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'rest',
-        'grpc',
-    ]
-    for transport in transports:
-        client = DataTaxonomyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (DataTaxonomyServiceClient, transports.DataTaxonomyServiceGrpcTransport),
-    (DataTaxonomyServiceAsyncClient, transports.DataTaxonomyServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py
deleted file mode 100644
index aade4dcd62f1..000000000000
--- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_dataplex_service.py
+++ /dev/null
@@ -1,28263 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceAsyncClient -from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceClient -from google.cloud.dataplex_v1.services.dataplex_service import pagers -from google.cloud.dataplex_v1.services.dataplex_service import transports -from google.cloud.dataplex_v1.types import analyze -from google.cloud.dataplex_v1.types import resources -from google.cloud.dataplex_v1.types import service -from google.cloud.dataplex_v1.types import tasks -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of 
google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataplexServiceClient._get_default_mtls_endpoint(None) is None - assert DataplexServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataplexServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataplexServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataplexServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataplexServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataplexServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataplexServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataplexServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataplexServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataplexServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, 
{"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataplexServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataplexServiceClient._get_client_cert_source(None, False) is None - assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataplexServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataplexServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataplexServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataplexServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataplexServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataplexServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataplexServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataplexServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataplexServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataplexServiceClient._get_universe_domain(None, None) == DataplexServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataplexServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DataplexServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DataplexServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataplexServiceClient, "grpc"), - (DataplexServiceAsyncClient, "grpc_asyncio"), - (DataplexServiceClient, "rest"), -]) -def test_dataplex_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataplexServiceGrpcTransport, "grpc"), - (transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DataplexServiceRestTransport, "rest"), -]) -def test_dataplex_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataplexServiceClient, "grpc"), - (DataplexServiceAsyncClient, "grpc_asyncio"), - (DataplexServiceClient, "rest"), -]) -def test_dataplex_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_dataplex_service_client_get_transport_class(): - transport = DataplexServiceClient.get_transport_class() - available_transports = [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceRestTransport, - ] - assert transport in available_transports - - transport = DataplexServiceClient.get_transport_class("grpc") - assert transport == transports.DataplexServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest"), -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -def test_dataplex_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataplexServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "true"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", "false"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", "true"), - (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(DataplexServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dataplex_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataplexServiceClient, DataplexServiceAsyncClient -]) -@mock.patch.object(DataplexServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataplexServiceAsyncClient)) -def test_dataplex_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataplexServiceClient, DataplexServiceAsyncClient -]) -@mock.patch.object(DataplexServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceClient)) -@mock.patch.object(DataplexServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataplexServiceAsyncClient)) -def test_dataplex_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataplexServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest"), -]) -def test_dataplex_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DataplexServiceClient, transports.DataplexServiceRestTransport, "rest", None), -]) -def test_dataplex_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_dataplex_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataplexServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc", grpc_helpers), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_dataplex_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateLakeRequest, - dict, -]) -def test_create_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateLakeRequest( - parent='parent_value', - lake_id='lake_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateLakeRequest( - parent='parent_value', - lake_id='lake_id_value', - ) - -def test_create_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc - request = {} - client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_lake in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_lake] = mock_rpc - - request = {} - await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lake_async(transport: str = 'grpc_asyncio', request_type=service.CreateLakeRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_lake_async_from_dict(): - await test_create_lake_async(request_type=dict) - -def test_create_lake_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateLakeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_lake_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateLakeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_lake_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_lake( - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].lake
- mock_val = resources.Lake(name='name_value')
- assert arg == mock_val
- arg = args[0].lake_id
- mock_val = 'lake_id_value'
- assert arg == mock_val
-
-
-def test_create_lake_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_lake(
- service.CreateLakeRequest(),
- parent='parent_value',
- lake=resources.Lake(name='name_value'),
- lake_id='lake_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_lake_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_lake(
- parent='parent_value',
- lake=resources.Lake(name='name_value'),
- lake_id='lake_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].lake
- mock_val = resources.Lake(name='name_value')
- assert arg == mock_val
- arg = args[0].lake_id
- mock_val = 'lake_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_lake_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_lake(
- service.CreateLakeRequest(),
- parent='parent_value',
- lake=resources.Lake(name='name_value'),
- lake_id='lake_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.UpdateLakeRequest,
- dict,
-])
-def test_update_lake(request_type, transport: str = 'grpc'):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.update_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = service.UpdateLakeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future) - - -def test_update_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateLakeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateLakeRequest( - ) - -def test_update_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc - request = {} - client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_lake in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_lake] = mock_rpc - - request = {} - await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_lake_async(transport: str = 'grpc_asyncio', request_type=service.UpdateLakeRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_lake_async_from_dict(): - await test_update_lake_async(request_type=dict) - -def test_update_lake_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateLakeRequest() - - request.lake.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'lake.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_lake_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateLakeRequest() - - request.lake.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
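- # (The routing header travels as gRPC metadata: an
- # ('x-goog-request-params', '<path>=<value>') pair derived from the
- # request. For UpdateLake the routing key is the nested 'lake.name'
- # rather than a top-level field, hence 'lake.name=name_value' below.)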
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'lake.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_lake_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_lake(
- lake=resources.Lake(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].lake
- mock_val = resources.Lake(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_lake_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_lake(
- service.UpdateLakeRequest(),
- lake=resources.Lake(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_lake_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_lake(
- lake=resources.Lake(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].lake
- mock_val = resources.Lake(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_lake_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.update_lake( - service.UpdateLakeRequest(), - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteLakeRequest, - dict, -]) -def test_delete_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteLakeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteLakeRequest( - name='name_value', - ) - -def test_delete_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc - request = {} - client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_lake in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_lake] = mock_rpc - - request = {} - await client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_lake_async(transport: str = 'grpc_asyncio', request_type=service.DeleteLakeRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_lake_async_from_dict(): - await test_delete_lake_async(request_type=dict) - -def test_delete_lake_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteLakeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_lake_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.DeleteLakeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_lake),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.delete_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_lake_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_lake(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_lake_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_lake(
- service.DeleteLakeRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_lake_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_lake(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_lake_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_lake( - service.DeleteLakeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListLakesRequest, - dict, -]) -def test_list_lakes(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListLakesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_lakes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListLakesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLakesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_lakes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListLakesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
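- # (Setting .name on the mocked return value mirrors the shared template
- # used for operation-returning methods, where the operation field must be
- # a string; for a plain ListLakes response the attribute is unused.)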
- client.list_lakes(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == service.ListLakesRequest(
- parent='parent_value',
- page_token='page_token_value',
- filter='filter_value',
- order_by='order_by_value',
- )
-
-def test_list_lakes_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_lakes in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc
- request = {}
- client.list_lakes(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_lakes(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lakes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_lakes in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_lakes] = mock_rpc
-
- request = {}
- await client.list_lakes(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_lakes(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lakes_async(transport: str = 'grpc_asyncio', request_type=service.ListLakesRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_lakes),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse(
- next_page_token='next_page_token_value',
- unreachable_locations=['unreachable_locations_value'],
- ))
- response = await client.list_lakes(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListLakesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLakesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_lakes_async_from_dict(): - await test_list_lakes_async(request_type=dict) - -def test_list_lakes_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListLakesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - call.return_value = service.ListLakesResponse() - client.list_lakes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_lakes_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListLakesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse()) - await client.list_lakes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_lakes_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListLakesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_lakes( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_lakes_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_lakes(
- service.ListLakesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_lakes_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_lakes),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_lakes(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_lakes_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_lakes(
- service.ListLakesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_lakes_pager(transport_name: str = "grpc"):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_lakes),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- service.ListLakesResponse(
- lakes=[
- resources.Lake(),
- resources.Lake(),
- resources.Lake(),
- ],
- next_page_token='abc',
- ),
- service.ListLakesResponse(
- lakes=[],
- next_page_token='def',
- ),
- service.ListLakesResponse(
- lakes=[
- resources.Lake(),
- ],
- next_page_token='ghi',
- ),
- service.ListLakesResponse(
- lakes=[
- resources.Lake(),
- resources.Lake(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_lakes(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, resources.Lake)
- for i in results)
-
-
-def test_list_lakes_pages(transport_name: str = "grpc"):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_lakes),
- '__call__') as call:
- # Set the response to a series of pages.
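- # (With a tuple side_effect, the mock returns one element per invocation,
- # so each page fetch consumes the next ListLakesResponse; the trailing
- # RuntimeError would only surface if the pager asked for a page past the
- # response with no next_page_token.)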
- call.side_effect = ( - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - resources.Lake(), - ], - next_page_token='abc', - ), - service.ListLakesResponse( - lakes=[], - next_page_token='def', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - ], - next_page_token='ghi', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - ], - ), - RuntimeError, - ) - pages = list(client.list_lakes(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_lakes_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - resources.Lake(), - ], - next_page_token='abc', - ), - service.ListLakesResponse( - lakes=[], - next_page_token='def', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - ], - next_page_token='ghi', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_lakes(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Lake) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_lakes_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - resources.Lake(), - ], - next_page_token='abc', - ), - service.ListLakesResponse( - lakes=[], - next_page_token='def', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - ], - next_page_token='ghi', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_lakes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetLakeRequest, - dict, -]) -def test_get_lake(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
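- # (When request_type is dict, the client is expected to coerce the plain
- # dict into a GetLakeRequest before invoking the stub, so both
- # parametrizations exercise the same request path.)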
- with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Lake( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - service_account='service_account_value', - ) - response = client.get_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetLakeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Lake) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.service_account == 'service_account_value' - - -def test_get_lake_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetLakeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_lake(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetLakeRequest( - name='name_value', - ) - -def test_get_lake_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc - request = {} - client.get_lake(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_lake(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_lake_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_lake in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_lake] = mock_rpc
-
- request = {}
- await client.get_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_lake(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_lake_async(transport: str = 'grpc_asyncio', request_type=service.GetLakeRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake(
- name='name_value',
- display_name='display_name_value',
- uid='uid_value',
- description='description_value',
- state=resources.State.ACTIVE,
- service_account='service_account_value',
- ))
- response = await client.get_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetLakeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, resources.Lake)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.state == resources.State.ACTIVE
- assert response.service_account == 'service_account_value'
-
-
-@pytest.mark.asyncio
-async def test_get_lake_async_from_dict():
- await test_get_lake_async(request_type=dict)
-
-def test_get_lake_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetLakeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_lake),
- '__call__') as call:
- call.return_value = resources.Lake()
- client.get_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_lake_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetLakeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_lake),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
- await client.get_lake(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_lake_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = resources.Lake()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_lake(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_lake_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_lake(
- service.GetLakeRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_lake_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_lake),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_lake(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_lake_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_lake( - service.GetLakeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListLakeActionsRequest, - dict, -]) -def test_list_lake_actions(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_lake_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListLakeActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLakeActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_lake_actions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListLakeActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_lake_actions(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == service.ListLakeActionsRequest(
- parent='parent_value',
- page_token='page_token_value',
- )
-
-def test_list_lake_actions_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_lake_actions in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc
- request = {}
- client.list_lake_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_lake_actions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_lake_actions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_lake_actions] = mock_rpc
-
- request = {}
- await client.list_lake_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_lake_actions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_lake_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListLakeActionsRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_lake_actions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_lake_actions(request)
-
- # Establish that the underlying gRPC stub method was called.
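- # (The bare len() assertion below only checks that the stub was invoked
- # at least once; a non-zero call count is truthy, unlike the == 1 check
- # used in the synchronous variants.)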
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListLakeActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLakeActionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_lake_actions_async_from_dict(): - await test_list_lake_actions_async(request_type=dict) - -def test_list_lake_actions_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListLakeActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_lake_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_lake_actions_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListLakeActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - await client.list_lake_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_lake_actions_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_lake_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_lake_actions_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_lake_actions( - service.ListLakeActionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_lake_actions_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_lake_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_lake_actions_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_lake_actions( - service.ListLakeActionsRequest(), - parent='parent_value', - ) - - -def test_list_lake_actions_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_lake_actions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) -def test_list_lake_actions_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = list(client.list_lake_actions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_lake_actions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_lake_actions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Action) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_lake_actions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_lake_actions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.CreateZoneRequest, - dict, -]) -def test_create_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
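# Aside, before the create_zone tests begin: a minimal sketch of the
# page-token protocol the pager tests above depend on. `list_rpc` is a
# hypothetical callable taking a dict request; real pagers such as
# ListLakeActionsPager wrap this loop. The final page carries an empty
# next_page_token, which is why the tests compare tokens against
# 'abc', 'def', 'ghi', and then ''.
def iterate_pages(list_rpc, request):
    """Yield each response page, chaining next_page_token between calls."""
    while True:
        response = list_rpc(request)
        yield response
        if not response.next_page_token:
            break
        request['page_token'] = response.next_page_token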
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateZoneRequest( - parent='parent_value', - zone_id='zone_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateZoneRequest( - parent='parent_value', - zone_id='zone_id_value', - ) - -def test_create_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc - request = {} - client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_zone] = mock_rpc - - request = {} - await client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_zone_async(transport: str = 'grpc_asyncio', request_type=service.CreateZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_zone_async_from_dict(): - await test_create_zone_async(request_type=dict) - -def test_create_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateZoneRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. 
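# Aside: create_zone/update_zone/delete_zone are long-running operations; the
# stub returns an `operations_pb2.Operation`, which the client wraps in a
# `google.api_core.operation.Operation` (a `future.Future`), hence the
# `isinstance(response, future.Future)` assertions. A sketch of that wrapping
# under the assumption that the operation is already finished, so `result()`
# resolves without polling (the mocked operations client is never consulted):
from unittest import mock
from google.api_core import operation
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2

done_pb = operations_pb2.Operation(name='operations/spam', done=True)
done_pb.response.Pack(empty_pb2.Empty())
ops_client = mock.Mock()

lro = operation.from_gapic(done_pb, ops_client, empty_pb2.Empty)
assert isinstance(lro.result(), empty_pb2.Empty)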
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateZoneRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_zone( - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].zone_id - mock_val = 'zone_id_value' - assert arg == mock_val - - -def test_create_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_zone( - service.CreateZoneRequest(), - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - -@pytest.mark.asyncio -async def test_create_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_zone( - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].zone_id - mock_val = 'zone_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_zone( - service.CreateZoneRequest(), - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateZoneRequest, - dict, -]) -def test_update_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateZoneRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateZoneRequest( - ) - -def test_update_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc - request = {} - client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_zone] = mock_rpc - - request = {} - await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_zone_async(transport: str = 'grpc_asyncio', request_type=service.UpdateZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_zone_async_from_dict(): - await test_update_zone_async(request_type=dict) - -def test_update_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() - - request.zone.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'zone.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateZoneRequest() - - request.zone.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'zone.name=name_value', - ) in kw['metadata'] - - -def test_update_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_zone( - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
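# Aside: a self-contained sketch of the `x-goog-request-params` entry that the
# field-header tests above expect. `to_grpc_metadata` encodes (field path,
# value) pairs into the single routing metadata entry; for update RPCs the
# path names a nested field, hence 'zone.name' rather than a top-level
# 'parent' or 'name'.
from google.api_core.gapic_v1 import routing_header

assert routing_header.to_grpc_metadata(
    (('zone.name', 'name_value'),)
) == ('x-goog-request-params', 'zone.name=name_value')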
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_zone( - service.UpdateZoneRequest(), - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_zone( - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].zone - mock_val = resources.Zone(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_zone( - service.UpdateZoneRequest(), - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteZoneRequest, - dict, -]) -def test_delete_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_delete_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteZoneRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteZoneRequest( - name='name_value', - ) - -def test_delete_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc - request = {} - client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_zone] = mock_rpc - - request = {} - await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_zone_async(transport: str = 'grpc_asyncio', request_type=service.DeleteZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_zone_async_from_dict(): - await test_delete_zone_async(request_type=dict) - -def test_delete_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_zone( - service.DeleteZoneRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_zone( - service.DeleteZoneRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListZonesRequest, - dict, -]) -def test_list_zones(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. 
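# Aside: why these tests patch `type(client.transport.<rpc>).__call__` rather
# than the attribute itself: the transport property returns a gRPC
# multicallable object that the client invokes via `__call__`, so the mock
# must live on the type for the invocation to be intercepted. A stand-in
# sketch (FakeMulticallable is hypothetical):
from unittest import mock

class FakeMulticallable:
    def __call__(self, request, **kwargs):
        raise NotImplementedError  # a real multicallable would hit the network

stub = FakeMulticallable()
with mock.patch.object(type(stub), '__call__') as call:
    call.return_value = 'response'
    assert stub(request={}) == 'response'
    call.assert_called_once()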
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZonesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZonesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_zones_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZonesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_zones(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZonesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_zones_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zones in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc - request = {} - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. 
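# Aside: the *_non_empty_request_with_auto_populated_field tests above tie
# into AIP-4235: string fields annotated as UUID4 request IDs get filled in
# client-side when left unset, while ordinary fields (parent, page_token,
# filter, order_by) must pass through verbatim. A sketch of the idea with a
# hypothetical dict request and field name:
import uuid

def ensure_request_id(request: dict) -> dict:
    """Populate a UUID4 request_id only if the caller left it empty."""
    if not request.get('request_id'):
        request['request_id'] = str(uuid.uuid4())
    return request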
- assert mock_rpc.call_count == 1
-
- client.list_zones(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zones_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_zones in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_zones] = mock_rpc
-
- request = {}
- await client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_zones(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_zones_async(transport: str = 'grpc_asyncio', request_type=service.ListZonesRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zones),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListZonesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListZonesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_zones_async_from_dict():
- await test_list_zones_async(request_type=dict)
-
-def test_list_zones_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListZonesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_zones),
- '__call__') as call:
- call.return_value = service.ListZonesResponse()
- client.list_zones(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_zones_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListZonesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse()) - await client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_zones_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_zones( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_zones_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zones( - service.ListZonesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_zones_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListZonesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_zones( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_zones_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_zones( - service.ListZonesRequest(), - parent='parent_value', - ) - - -def test_list_zones_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_zones(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Zone) - for i in results) -def test_list_zones_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = list(client.list_zones(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_zones_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_zones(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Zone) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_zones_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zones(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetZoneRequest, - dict, -]) -def test_get_zone(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - ) - response = client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
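# Aside: what the async pager tests above exercise, in miniature: awaiting the
# list call performs the first RPC and returns an AsyncPager, and `async for`
# then walks items across pages, fetching follow-up pages lazily. A sketch
# (assumes `client` is a DataplexServiceAsyncClient):
async def collect_all_zones(client):
    pager = await client.list_zones(request={})
    return [zone async for zone in pager]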
- assert isinstance(response, resources.Zone) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == resources.Zone.Type.RAW - - -def test_get_zone_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetZoneRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_zone(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetZoneRequest( - name='name_value', - ) - -def test_get_zone_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc - request = {} - client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_zone_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_zone in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_zone] = mock_rpc - - request = {} - await client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_zone_async(transport: str = 'grpc_asyncio', request_type=service.GetZoneRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - )) - response = await client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetZoneRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Zone) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == resources.Zone.Type.RAW - - -@pytest.mark.asyncio -async def test_get_zone_async_from_dict(): - await test_get_zone_async(request_type=dict) - -def test_get_zone_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value = resources.Zone() - client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_zone_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetZoneRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) - await client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
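# The field_headers tests just above check that any request field that
# appears in the HTTP/1.1 URI is mirrored into x-goog-request-params
# metadata. A rough standalone sketch of that encoding, assuming simple
# percent-encoded key=value pairs (the real gapic_v1.routing_header helper
# handles more cases):
from urllib.parse import quote


def routing_metadata(params: dict) -> tuple:
    # Produce the ('x-goog-request-params', 'k=v&...') pair the tests
    # look for inside kw['metadata'].
    encoded = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in params.items())
    return ("x-goog-request-params", encoded)


assert routing_metadata({"name": "name_value"}) == (
    "x-goog-request-params",
    "name=name_value",
)
assert routing_metadata({"asset.name": "name_value"})[1] == "asset.name=name_value"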
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_zone_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_zone_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_zone( - service.GetZoneRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_zone_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Zone() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_zone( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_zone_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_zone( - service.GetZoneRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListZoneActionsRequest, - dict, -]) -def test_list_zone_actions(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
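# The flattened/flattened_error pairs above encode one client rule: keyword
# shortcuts may build the request for you, but mixing them with an explicit
# request object is ambiguous and raises ValueError. A compact standalone
# model of that guard (GetZoneRequest and get_zone here are hypothetical
# stand-ins, not the generated classes):
class GetZoneRequest:
    def __init__(self, name=""):
        self.name = name


def get_zone(request=None, *, name=None):
    if request is not None and name is not None:
        raise ValueError("pass a request object or flattened fields, not both")
    if request is None:
        request = GetZoneRequest(name=name or "")
    return request


assert get_zone(name="name_value").name == "name_value"
try:
    get_zone(GetZoneRequest(), name="name_value")
except ValueError:
    pass  # expected: both styles at once is rejected
else:
    raise AssertionError("expected ValueError")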
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListZoneActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZoneActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_zone_actions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListZoneActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_zone_actions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListZoneActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_zone_actions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zone_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc - request = {} - client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_zone_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_zone_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_zone_actions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_zone_actions] = mock_rpc - - request = {} - await client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_zone_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_zone_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListZoneActionsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListZoneActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZoneActionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_zone_actions_async_from_dict(): - await test_list_zone_actions_async(request_type=dict) - -def test_list_zone_actions_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListZoneActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_zone_actions_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListZoneActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - await client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_zone_actions_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_zone_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_zone_actions_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zone_actions( - service.ListZoneActionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_zone_actions_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_zone_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_zone_actions_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_zone_actions( - service.ListZoneActionsRequest(), - parent='parent_value', - ) - - -def test_list_zone_actions_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_zone_actions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) -def test_list_zone_actions_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = list(client.list_zone_actions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_zone_actions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_zone_actions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Action) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_zone_actions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_zone_actions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.CreateAssetRequest, - dict, -]) -def test_create_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateAssetRequest( - parent='parent_value', - asset_id='asset_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateAssetRequest( - parent='parent_value', - asset_id='asset_id_value', - ) - -def test_create_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc - request = {} - client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_asset] = mock_rpc - - request = {} - await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_asset_async(transport: str = 'grpc_asyncio', request_type=service.CreateAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_asset_async_from_dict(): - await test_create_asset_async(request_type=dict) - -def test_create_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateAssetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateAssetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_asset_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_asset( - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].asset - mock_val = resources.Asset(name='name_value') - assert arg == mock_val - arg = args[0].asset_id - mock_val = 'asset_id_value' - assert arg == mock_val - - -def test_create_asset_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_asset( - service.CreateAssetRequest(), - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - -@pytest.mark.asyncio -async def test_create_asset_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_asset( - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].asset - mock_val = resources.Asset(name='name_value') - assert arg == mock_val - arg = args[0].asset_id - mock_val = 'asset_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_asset( - service.CreateAssetRequest(), - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateAssetRequest, - dict, -]) -def test_update_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateAssetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateAssetRequest( - ) - -def test_update_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc - request = {} - client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
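# The create/update/delete asset tests stub the RPC with an
# operations_pb2.Operation and then assert isinstance(response,
# future.Future): long-running methods hand back a future that resolves to
# the finished operation's result instead of the raw Operation proto. A toy
# standalone model of that wrapping, independent of google.api_core:
import concurrent.futures


class Operation:
    # Stand-in for operations_pb2.Operation: just a name here.
    def __init__(self, name):
        self.name = name


def call_lro(stub_response: Operation) -> concurrent.futures.Future:
    # The client wraps the raw Operation in a future so callers can
    # .result() it rather than polling the operations service by hand.
    fut = concurrent.futures.Future()
    fut.set_result(stub_response)  # toy: resolve immediately
    return fut


response = call_lro(Operation(name="operations/spam"))
assert isinstance(response, concurrent.futures.Future)
assert response.result().name == "operations/spam"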
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_asset] = mock_rpc - - request = {} - await client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_asset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_asset_async_from_dict(): - await test_update_asset_async(request_type=dict) - -def test_update_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateAssetRequest() - - request.asset.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'asset.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateAssetRequest() - - request.asset.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'asset.name=name_value', - ) in kw['metadata'] - - -def test_update_asset_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_asset( - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].asset - mock_val = resources.Asset(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_asset_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_asset( - service.UpdateAssetRequest(), - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_asset_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_asset( - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].asset - mock_val = resources.Asset(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_asset( - service.UpdateAssetRequest(), - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteAssetRequest, - dict, -]) -def test_delete_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteAssetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteAssetRequest( - name='name_value', - ) - -def test_delete_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc - request = {} - client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_asset] = mock_rpc - - request = {} - await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_asset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
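# On the asyncio transport the patched stub must return an awaitable, which
# is why the async tests below wrap every canned response in
# grpc_helpers_async.FakeUnaryUnaryCall. A self-contained imitation of that
# awaitable unary-unary shape (FakeCall is a hypothetical stand-in):
import asyncio


class FakeCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        # Generator-based awaitable: awaiting it yields the canned response.
        if False:
            yield  # never runs; only makes this method a generator
        return self._response


async def delete_asset(request):
    return await FakeCall({"name": "operations/spam"})


assert asyncio.run(delete_asset({}))["name"] == "operations/spam"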
- with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_asset_async_from_dict(): - await test_delete_asset_async(request_type=dict) - -def test_delete_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_asset_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_asset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_asset_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_asset( - service.DeleteAssetRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_asset_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_asset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_asset( - service.DeleteAssetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListAssetsRequest, - dict, -]) -def test_list_assets(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc - - request = {} - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_assets_async_from_dict(): - await test_list_assets_async(request_type=dict) - -def test_list_assets_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = service.ListAssetsResponse() - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_assets_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse()) - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_assets_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListAssetsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_assets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
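# ---- Editor's illustration (not part of the patch) ------------------------
# The field-header tests above check that URI-bound request fields are echoed
# into x-goog-request-params metadata. to_grpc_metadata is the real helper
# from google.api_core that the generated client and these tests rely on:
from google.api_core.gapic_v1 import routing_header

assert routing_header.to_grpc_metadata(
    (('parent', 'parent_value'),)
) == ('x-goog-request-params', 'parent=parent_value')
# ----------------------------------------------------------------------------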
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_assets_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_assets( - service.ListAssetsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_assets_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListAssetsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_assets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_assets_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_assets( - service.ListAssetsRequest(), - parent='parent_value', - ) - - -def test_list_assets_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_assets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Asset) - for i in results) -def test_list_assets_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Set the response to a series of pages. 
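# ---- Editor's illustration (not part of the patch) ------------------------
# The pager contract exercised above, in miniature: iterating a pager follows
# next_page_token from page to page and yields individual items, so pages of
# 3 + 0 + 1 + 2 assets flatten to exactly 6 results. Hypothetical classes:
class FakePage:
    def __init__(self, items, next_page_token=''):
        self.items = items
        self.next_page_token = next_page_token

class FakePager:
    def __init__(self, fetch):
        self._fetch = fetch          # callable: page_token -> FakePage
    def __iter__(self):
        token = ''
        while True:
            page = self._fetch(token)
            yield from page.items
            token = page.next_page_token
            if not token:            # empty token marks the last page
                return

pages = {
    '':    FakePage(['a1', 'a2', 'a3'], 'abc'),
    'abc': FakePage([], 'def'),
    'def': FakePage(['a4'], 'ghi'),
    'ghi': FakePage(['a5', 'a6'], ''),
}
assert list(FakePager(pages.__getitem__)) == ['a1', 'a2', 'a3', 'a4', 'a5', 'a6']
# ----------------------------------------------------------------------------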
- call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_assets_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_assets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Asset) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_assets_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_assets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetAssetRequest, - dict, -]) -def test_get_asset(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
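# ---- Editor's illustration (not part of the patch) ------------------------
# The async pager tests above drive the same flattening with `async for`.
# A self-contained sketch of that iteration shape (hypothetical class):
import asyncio

class FakeAsyncPager:
    def __init__(self, pages):
        self._pages = pages
    async def __aiter__(self):
        for page in self._pages:     # each page is fetched lazily in real pagers
            for item in page:
                yield item

async def main():
    pager = FakeAsyncPager([['a1', 'a2', 'a3'], [], ['a4'], ['a5', 'a6']])
    assert [item async for item in pager] == ['a1', 'a2', 'a3', 'a4', 'a5', 'a6']

asyncio.run(main())
# ----------------------------------------------------------------------------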
- with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - ) - response = client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Asset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -def test_get_asset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetAssetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_asset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetAssetRequest( - name='name_value', - ) - -def test_get_asset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc - request = {} - client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. 
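# ---- Editor's illustration (not part of the patch) ------------------------
# The *_non_empty_request_with_auto_populated_field tests above guard the
# AIP-4235 behavior: fields annotated for auto-population (typically
# request_id) are filled with a UUID4 only when the caller left them empty.
# Standalone sketch; the field name is illustrative:
import uuid

def auto_populate(request: dict, auto_fields=('request_id',)) -> dict:
    for field in auto_fields:
        if not request.get(field):   # never overwrite a caller-supplied value
            request[field] = str(uuid.uuid4())
    return request

req = auto_populate({'name': 'name_value'})
assert req['name'] == 'name_value'   # explicit fields pass through untouched
uuid.UUID(req['request_id'])         # generated value parses as a UUID
# ----------------------------------------------------------------------------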
- assert mock_rpc.call_count == 1 - - client.get_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_asset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_asset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_asset] = mock_rpc - - request = {} - await client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_asset_async(transport: str = 'grpc_asyncio', request_type=service.GetAssetRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - )) - response = await client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetAssetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Asset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -@pytest.mark.asyncio -async def test_get_asset_async_from_dict(): - await test_get_asset_async(request_type=dict) - -def test_get_asset_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
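# ---- Editor's illustration (not part of the patch) ------------------------
# Why the async tests above wrap responses in grpc_helpers_async.FakeUnaryUnaryCall:
# an async gRPC stub returns an awaitable call object, not the response itself.
# A minimal stand-in (hypothetical FakeCall) shows the shape being mocked:
import asyncio

class FakeCall:
    """Awaitable that resolves to a canned response, like a unary-unary call."""
    def __init__(self, response):
        self._response = response
    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def main():
    assert await FakeCall('asset') == 'asset'   # awaiting yields the response

asyncio.run(main())
# ----------------------------------------------------------------------------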
- with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value = resources.Asset() - client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_asset_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetAssetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset()) - await client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_asset_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Asset() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_asset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_asset_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_asset( - service.GetAssetRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_asset_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Asset() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_asset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
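# ---- Editor's illustration (not part of the patch) ------------------------
# The mock.patch.object(type(...), '__call__') idiom used throughout these
# tests works because special methods are looked up on the type, not the
# instance. A standalone demo with a hypothetical multicallable:
from unittest import mock

class Multicallable:
    def __call__(self, request):
        raise RuntimeError('would hit the network')

stub = Multicallable()
with mock.patch.object(type(stub), '__call__') as call:
    call.return_value = 'response'
    assert stub(request={}) == 'response'   # intercepted; no network I/O
    call.assert_called_once()
# ----------------------------------------------------------------------------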
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_asset_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_asset( - service.GetAssetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListAssetActionsRequest, - dict, -]) -def test_list_asset_actions(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListAssetActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_asset_actions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListAssetActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_asset_actions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListAssetActionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_asset_actions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_asset_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc - request = {} - client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_asset_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_asset_actions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_asset_actions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_asset_actions] = mock_rpc - - request = {} - await client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_asset_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_asset_actions_async(transport: str = 'grpc_asyncio', request_type=service.ListAssetActionsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListAssetActionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetActionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_asset_actions_async_from_dict(): - await test_list_asset_actions_async(request_type=dict) - -def test_list_asset_actions_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListAssetActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_asset_actions_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListAssetActionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - await client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_asset_actions_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_asset_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_asset_actions_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_asset_actions( - service.ListAssetActionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_asset_actions_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListActionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_asset_actions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_asset_actions_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_asset_actions( - service.ListAssetActionsRequest(), - parent='parent_value', - ) - - -def test_list_asset_actions_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_asset_actions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) -def test_list_asset_actions_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
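# ---- Editor's illustration (not part of the patch) ------------------------
# Why every page series above ends in RuntimeError: with mock.side_effect,
# each extra invocation consumes the next item, so the trailing exception
# proves the pager stopped after the page whose next_page_token was empty.
from unittest import mock

fetch = mock.Mock(side_effect=['page1', 'page2', RuntimeError])
assert fetch() == 'page1'
assert fetch() == 'page2'
try:
    fetch()                          # a third fetch would be a paging bug
except RuntimeError:
    pass
# ----------------------------------------------------------------------------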
- with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = list(client.list_asset_actions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_asset_actions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_asset_actions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Action) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_asset_actions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_asset_actions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.CreateTaskRequest, - dict, -]) -def test_create_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateTaskRequest( - parent='parent_value', - task_id='task_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
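# ---- Editor's illustration (not part of the patch) ------------------------
# What `assert isinstance(response, future.Future)` above really checks: LRO
# methods wrap the raw operations_pb2.Operation in an api_core Operation, a
# concrete future whose .result() yields the unpacked response. A hedged
# sketch with a mocked operations client and Empty as the result type:
from unittest import mock
from google.api_core import future, operation
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2

done_pb = operations_pb2.Operation(name='operations/spam', done=True)
done_pb.response.Pack(empty_pb2.Empty())

lro = operation.from_gapic(done_pb, mock.Mock(), empty_pb2.Empty)
assert isinstance(lro, future.Future)
assert isinstance(lro.result(), empty_pb2.Empty)   # response unpacked from Any
# ----------------------------------------------------------------------------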
- client.create_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateTaskRequest( - parent='parent_value', - task_id='task_id_value', - ) - -def test_create_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_task] = mock_rpc - request = {} - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_task] = mock_rpc - - request = {} - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=service.CreateTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_task_async_from_dict(): - await test_create_task_async(request_type=dict) - -def test_create_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateTaskRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateTaskRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_task( - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
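# ---- Editor's illustration (not part of the patch) ------------------------
# How the multi-field flattened call above is assembled before the RPC: each
# keyword lands on the matching request field, which is what the
# args[0].parent / .task / .task_id asserts verify. Hypothetical types:
from dataclasses import dataclass, field

@dataclass
class FakeTask:
    name: str = ''

@dataclass
class FakeCreateTaskRequest:
    parent: str = ''
    task: FakeTask = field(default_factory=FakeTask)
    task_id: str = ''

def build_request(parent=None, task=None, task_id=None):
    request = FakeCreateTaskRequest()
    if parent is not None:
        request.parent = parent
    if task is not None:
        request.task = task
    if task_id is not None:
        request.task_id = task_id
    return request                   # the generated client sends this to the stub

req = build_request(parent='parent_value', task=FakeTask(name='name_value'),
                    task_id='task_id_value')
assert (req.parent, req.task.name, req.task_id) == (
    'parent_value', 'name_value', 'task_id_value')
# ----------------------------------------------------------------------------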
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].task - mock_val = tasks.Task(name='name_value') - assert arg == mock_val - arg = args[0].task_id - mock_val = 'task_id_value' - assert arg == mock_val - - -def test_create_task_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_task( - service.CreateTaskRequest(), - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - -@pytest.mark.asyncio -async def test_create_task_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_task( - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].task - mock_val = tasks.Task(name='name_value') - assert arg == mock_val - arg = args[0].task_id - mock_val = 'task_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_task( - service.CreateTaskRequest(), - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateTaskRequest, - dict, -]) -def test_update_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_update_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateTaskRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateTaskRequest( - ) - -def test_update_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_task] = mock_rpc - request = {} - client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_task] = mock_rpc - - request = {} - await client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_task_async(transport: str = 'grpc_asyncio', request_type=service.UpdateTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_task_async_from_dict(): - await test_update_task_async(request_type=dict) - -def test_update_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateTaskRequest() - - request.task.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'task.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateTaskRequest() - - request.task.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
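# ---- Editor's illustration (not part of the patch) ------------------------
# Note the nested binding above: setting request.task.name surfaces as
# `task.name=...` in the routing header, via the same real api_core helper:
from google.api_core.gapic_v1 import routing_header

assert routing_header.to_grpc_metadata(
    (('task.name', 'name_value'),)
) == ('x-goog-request-params', 'task.name=name_value')
# ----------------------------------------------------------------------------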
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'task.name=name_value', - ) in kw['metadata'] - - -def test_update_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_task( - task=tasks.Task(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].task - mock_val = tasks.Task(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_task_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_task( - service.UpdateTaskRequest(), - task=tasks.Task(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_task_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_task( - task=tasks.Task(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].task - mock_val = tasks.Task(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
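# ---- Editor's illustration (not part of the patch) ------------------------
# The flattened update_task calls pair the resource with a FieldMask naming
# the fields to overwrite; everything outside `paths` is left untouched. A
# standalone demo of that partial-update semantics with the real well-known
# type (Operation is just a convenient message to mask):
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2

source = operations_pb2.Operation(name='operations/new', done=True)
destination = operations_pb2.Operation(name='operations/old')

mask = field_mask_pb2.FieldMask(paths=['name'])
mask.MergeMessage(source, destination)
assert destination.name == 'operations/new'   # masked field copied over
assert destination.done is False              # unmasked field untouched
# ----------------------------------------------------------------------------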
- with pytest.raises(ValueError): - await client.update_task( - service.UpdateTaskRequest(), - task=tasks.Task(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteTaskRequest, - dict, -]) -def test_delete_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteTaskRequest( - name='name_value', - ) - -def test_delete_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc - request = {} - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_task] = mock_rpc - - request = {} - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=service.DeleteTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_task_async_from_dict(): - await test_delete_task_async(request_type=dict) - -def test_delete_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. 
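# Note on the Operation-typed stubs used throughout these tests: delete_task
# is long-running, so the transport stub yields a raw operations_pb2.Operation
# and the client wraps it in an operation future; hence the earlier assertions
# of isinstance(response, future.Future) rather than of a response message.
#
#     response = client.delete_task(request)   # future-like wrapper
#     # result = response.result()             # would block on the real LRO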
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_task_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_task( - service.DeleteTaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_task_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_task( - service.DeleteTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListTasksRequest, - dict, -]) -def test_list_tasks(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -def test_list_tasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListTasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
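# A minimal sketch of the mock behavior leaned on above: mock.Mock()
# auto-creates attributes, and assigning a real string to
# call.return_value.name keeps any code path that reads `response.name`
# (e.g. operation plumbing in some clients) from seeing a Mock object.
#
#     from unittest import mock
#
#     call = mock.Mock()
#     call.return_value.name = "foo"
#     assert call().name == "foo"      # call() returns return_value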
- client.list_tasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListTasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_tasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc - request = {} - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tasks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_tasks in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc - - request = {} - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_tasks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=service.ListTasksRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - response = await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - -def test_list_tasks_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = service.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tasks_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
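# Sketch of the calling convention enforced below: a GAPIC method accepts
# either a fully formed request object or flattened keyword fields, never
# both at once.
#
#     client.list_tasks(request=service.ListTasksRequest(parent='p'))   # ok
#     client.list_tasks(parent='p')                                     # ok
#     client.list_tasks(service.ListTasksRequest(), parent='p')         # ValueError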
- with pytest.raises(ValueError): - client.list_tasks( - service.ListTasksRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tasks_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tasks( - service.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tasks.Task) - for i in results) -def test_list_tasks_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. 
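# How the fake paging drives these tests (sketch): side_effect hands the mock
# one response per RPC, so iterating the pager flattens 3 + 0 + 1 + 2 = 6
# Tasks across four pages, while .pages exposes the per-page responses and
# their tokens. The trailing RuntimeError is a sentinel that only fires if
# the pager keeps fetching past the final page, whose next_page_token is
# empty.
#
#     pager = client.list_tasks(request={})
#     assert len(list(pager)) == 6     # pages flattened into Tasks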
- call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tasks.Task) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - tasks.Task(), - ], - next_page_token='abc', - ), - service.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - ], - next_page_token='ghi', - ), - service.ListTasksResponse( - tasks=[ - tasks.Task(), - tasks.Task(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetTaskRequest, - dict, -]) -def test_get_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = tasks.Task( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, tasks.Task) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.state == resources.State.ACTIVE - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetTaskRequest( - name='name_value', - ) - -def test_get_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc - request = {} - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
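# What _prep_wrapped_messages sets up (rough sketch of the generated
# transport, not verbatim): each stub is wrapped once with its default
# retry/timeout settings at client construction and memoized, so later
# calls skip wrap_method entirely.
#
#     self._wrapped_methods = {
#         self.get_task: gapic_v1.method.wrap_method(
#             self.get_task,
#             default_timeout=None,        # illustrative defaults
#             client_info=client_info,
#         ),
#         # ... one entry per RPC ...
#     }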
- assert mock_rpc.call_count == 1 - - client.get_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc - - request = {} - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=service.GetTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - )) - response = await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, tasks.Task) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.display_name == 'display_name_value' - assert response.state == resources.State.ACTIVE - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - -def test_get_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = tasks.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tasks.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_task_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - service.GetTaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tasks.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_task( - service.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListJobsRequest, - dict, -]) -def test_list_jobs(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListJobsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListJobsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc - - request = {} - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=service.ListJobsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - -def test_list_jobs_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = service.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_jobs_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_jobs_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - service.ListJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_jobs( - service.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tasks.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
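# Async counterpart of the paging sketch earlier: awaiting the method gives
# back an async pager that is consumed with `async for` (or via .pages for
# whole responses), e.g.:
#
#     async_pager = await client.list_jobs(request={})
#     jobs = [job async for job in async_pager]
#     assert len(jobs) == 6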
- call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tasks.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - tasks.Job(), - ], - next_page_token='abc', - ), - service.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - ], - next_page_token='ghi', - ), - service.ListJobsResponse( - jobs=[ - tasks.Job(), - tasks.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.RunTaskRequest, - dict, -]) -def test_run_task(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.RunTaskResponse( - ) - response = client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.RunTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, service.RunTaskResponse) - - -def test_run_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
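# Hedged summary of AIP-4235 for context: only fields explicitly annotated as
# auto-populated (conventionally a UUID4-formatted `request_id`) are stamped
# by the client when left empty; ordinary strings such as `name` are sent
# exactly as given, which is what this request pins down. The mechanism is
# roughly:
#
#     import uuid
#
#     if not request.request_id:               # hypothetical annotated field
#         request.request_id = str(uuid.uuid4())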
- request = service.RunTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.run_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.RunTaskRequest( - name='name_value', - ) - -def test_run_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.run_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.run_task] = mock_rpc - request = {} - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.run_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_run_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.run_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.run_task] = mock_rpc - - request = {} - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.run_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=service.RunTaskRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
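# Why the return value gets wrapped just below (sketch): the async client
# awaits the call object returned by the stub, and a bare proto message is
# not awaitable; FakeUnaryUnaryCall from google.api_core.grpc_helpers_async
# is an awaitable stand-in that resolves to the wrapped response.
#
#     fake = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse())
#     # response = await fake    # yields the wrapped RunTaskResponse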
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( - )) - response = await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.RunTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, service.RunTaskResponse) - - -@pytest.mark.asyncio -async def test_run_task_async_from_dict(): - await test_run_task_async(request_type=dict) - -def test_run_task_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.RunTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = service.RunTaskResponse() - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_task_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.RunTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse()) - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_run_task_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.RunTaskResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_run_task_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.run_task( - service.RunTaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_run_task_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.RunTaskResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_run_task_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.run_task( - service.RunTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.GetJobRequest, - dict, -]) -def test_get_job(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tasks.Job( - name='name_value', - uid='uid_value', - state=tasks.Job.State.RUNNING, - retry_count=1214, - service=tasks.Job.Service.DATAPROC, - service_job='service_job_value', - message='message_value', - trigger=tasks.Job.Trigger.TASK_CONFIG, - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, tasks.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.state == tasks.Job.State.RUNNING - assert response.retry_count == 1214 - assert response.service == tasks.Job.Service.DATAPROC - assert response.service_job == 'service_job_value' - assert response.message == 'message_value' - assert response.trigger == tasks.Job.Trigger.TASK_CONFIG - - -def test_get_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
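-    # Note: under AIP-4235, string fields annotated for auto-population
-    # (request_id-style fields with a UUID4 format) are filled in by the
-    # wrapped RPC when left unset, so only the remaining fields are set
-    # explicitly here.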
- request = service.GetJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetJobRequest( - name='name_value', - ) - -def test_get_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc - request = {} - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc - - request = {} - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=service.GetJobRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job(
-            name='name_value',
-            uid='uid_value',
-            state=tasks.Job.State.RUNNING,
-            retry_count=1214,
-            service=tasks.Job.Service.DATAPROC,
-            service_job='service_job_value',
-            message='message_value',
-            trigger=tasks.Job.Trigger.TASK_CONFIG,
-        ))
-        response = await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, tasks.Job)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.state == tasks.Job.State.RUNNING
-    assert response.retry_count == 1214
-    assert response.service == tasks.Job.Service.DATAPROC
-    assert response.service_job == 'service_job_value'
-    assert response.message == 'message_value'
-    assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
-
-
-@pytest.mark.asyncio
-async def test_get_job_async_from_dict():
-    await test_get_job_async(request_type=dict)
-
-def test_get_job_field_headers():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value = tasks.Job()
-        client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_job_field_headers_async():
-    client = DataplexServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetJobRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job())
-        await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_job_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = tasks.Job()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - service.GetJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tasks.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job( - service.GetJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.CancelJobRequest, - dict, -]) -def test_cancel_job(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CancelJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
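-    # Note: `__call__` is patched on the type rather than the instance
-    # because Python looks up dunder methods on the type, so patching the
-    # multicallable instance itself would never intercept the invocation.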
- with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CancelJobRequest( - name='name_value', - ) - -def test_cancel_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc - - request = {} - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=service.CancelJobRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_job_async_from_dict(): - await test_cancel_job_async(request_type=dict) - -def test_cancel_job_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = None - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_job_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_cancel_job_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_cancel_job_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_job( - service.CancelJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_cancel_job_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_job_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_job( - service.CancelJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateEnvironmentRequest, - dict, -]) -def test_create_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateEnvironmentRequest( - parent='parent_value', - environment_id='environment_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
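-        # Note: giving the Mock a real string `name` up front keeps any
-        # wrapper code that inspects `operation.name` (as the compute
-        # surfaces do) from tripping over a Mock attribute.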
- client.create_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateEnvironmentRequest( - parent='parent_value', - environment_id='environment_id_value', - ) - -def test_create_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc - request = {} - client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_environment] = mock_rpc - - request = {} - await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_environment_async(transport: str = 'grpc_asyncio', request_type=service.CreateEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
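-    # Note: proto3 scalar fields always have a default, so an empty request
-    # message still serializes cleanly; with the API mocked out, no
-    # server-side validation of required fields applies.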
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_environment_async_from_dict(): - await test_create_environment_async(request_type=dict) - -def test_create_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateEnvironmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateEnvironmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
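-        # Note: GAPIC methods accept either a request object or flattened
-        # keyword arguments, never both; the kwargs below are copied into a
-        # fresh CreateEnvironmentRequest before the transport is invoked.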
- client.create_environment( - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].environment_id - mock_val = 'environment_id_value' - assert arg == mock_val - - -def test_create_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_environment( - service.CreateEnvironmentRequest(), - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - -@pytest.mark.asyncio -async def test_create_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_environment( - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].environment_id - mock_val = 'environment_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_environment( - service.CreateEnvironmentRequest(), - parent='parent_value', - environment=analyze.Environment(name='name_value'), - environment_id='environment_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateEnvironmentRequest, - dict, -]) -def test_update_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
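-        # Note: long-running methods return a raw operations_pb2.Operation
-        # from the stub, which the client wraps in a google.api_core future;
-        # hence the `isinstance(response, future.Future)` assertion below.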
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateEnvironmentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateEnvironmentRequest( - ) - -def test_update_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc - request = {} - client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_environment] = mock_rpc - - request = {} - await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_environment_async(transport: str = 'grpc_asyncio', request_type=service.UpdateEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_environment_async_from_dict(): - await test_update_environment_async(request_type=dict) - -def test_update_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateEnvironmentRequest() - - request.environment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
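-    # Note: routing headers are built from the request fields named in the
-    # method's routing annotation and sent as `x-goog-request-params`
-    # metadata, e.g. `environment.name=name_value` for a nested field.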
- with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'environment.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateEnvironmentRequest() - - request.environment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'environment.name=name_value', - ) in kw['metadata'] - - -def test_update_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_environment( - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_environment( - service.UpdateEnvironmentRequest(), - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_environment( - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].environment - mock_val = analyze.Environment(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_environment( - service.UpdateEnvironmentRequest(), - environment=analyze.Environment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteEnvironmentRequest, - dict, -]) -def test_delete_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteEnvironmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteEnvironmentRequest( - name='name_value', - ) - -def test_delete_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc - request = {} - client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_environment] = mock_rpc - - request = {} - await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_environment_async(transport: str = 'grpc_asyncio', request_type=service.DeleteEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_environment_async_from_dict(): - await test_delete_environment_async(request_type=dict) - -def test_delete_environment_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteEnvironmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_environment_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteEnvironmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_environment_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_environment( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_environment_flattened_error(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_environment( - service.DeleteEnvironmentRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_environment_flattened_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_environment( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_environment_flattened_error_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_environment( - service.DeleteEnvironmentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListEnvironmentsRequest, - dict, -]) -def test_list_environments(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListEnvironmentsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListEnvironmentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEnvironmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_environments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListEnvironmentsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_environments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListEnvironmentsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_environments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_environments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc - request = {} - client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_environments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_environments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_environments in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_environments] = mock_rpc - - request = {} - await client.list_environments(request) - - # Establish that the underlying gRPC stub method was called. 
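
A minimal, stdlib-only sketch of the caching pattern the surrounding
*_use_cached_wrapped_rpc tests exercise: each transport callable is wrapped
once at construction time (analogous to _prep_wrapped_messages) and the cached
wrapper is reused on every call. `wrap` and `Transport` here are illustrative
stand-ins, not the gapic-generator API.

    def wrap(fn):
        def wrapper(*args, **kwargs):
            return fn(*args, **kwargs)
        return wrapper

    class Transport:
        def __init__(self):
            self.list_environments = lambda request: "response"
            # Wrapped exactly once, at construction time.
            self._wrapped_methods = {
                self.list_environments: wrap(self.list_environments),
            }

    transport = Transport()
    # Later calls look the cached wrapper up instead of re-wrapping.
    assert transport.list_environments in transport._wrapped_methods
    assert transport._wrapped_methods[transport.list_environments]({}) == "response"
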
- assert mock_rpc.call_count == 1
-
- await client.list_environments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_environments_async(transport: str = 'grpc_asyncio', request_type=service.ListEnvironmentsRequest):
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListEnvironmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_environments_async_from_dict():
- await test_list_environments_async(request_type=dict)
-
-def test_list_environments_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListEnvironmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- call.return_value = service.ListEnvironmentsResponse()
- client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_environments_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListEnvironmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
- await client.list_environments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
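
The 'x-goog-request-params' assertion that follows checks the routing header
that google-api-core derives from request fields so the backend can route by
resource name. A small sketch of how that metadata tuple is built, assuming
google-api-core is installed:

    from google.api_core.gapic_v1 import routing_header

    # The field/value pairs are URL-encoded into a single metadata entry.
    metadata = routing_header.to_grpc_metadata([("parent", "parent_value")])
    assert metadata == ("x-goog-request-params", "parent=parent_value")
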
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_environments_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListEnvironmentsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_environments(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_environments_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_environments(
- service.ListEnvironmentsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_environments_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_environments(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_environments_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_environments(
- service.ListEnvironmentsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_environments_pager(transport_name: str = "grpc"):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_environments),
- '__call__') as call:
- # Set the response to a series of pages.
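
In the pager tests that follow, `side_effect` hands the mocked stub one
response per page request, and the trailing RuntimeError fails fast if the
pager ever over-fetches. A stdlib-only sketch of the same consumption pattern
(the dict-shaped pages are illustrative stand-ins for the response messages):

    from unittest import mock

    fetch_page = mock.Mock(side_effect=[
        {"items": [1, 2, 3], "next_page_token": "abc"},
        {"items": [], "next_page_token": "def"},
        {"items": [4], "next_page_token": ""},
        RuntimeError,  # raised only if a fourth page is requested
    ])

    items, token = [], None
    while token != "":
        page = fetch_page(token)
        items.extend(page["items"])
        token = page["next_page_token"]

    assert items == [1, 2, 3, 4]
    assert fetch_page.call_count == 3
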
- call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_environments(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Environment) - for i in results) -def test_list_environments_pages(transport_name: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_environments(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_environments_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_environments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Environment) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_environments_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_environments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_environments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.GetEnvironmentRequest, - dict, -]) -def test_get_environment(request_type, transport: str = 'grpc'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyze.Environment( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - ) - response = client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetEnvironmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyze.Environment) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -def test_get_environment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetEnvironmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_environment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetEnvironmentRequest( - name='name_value', - ) - -def test_get_environment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc - request = {} - client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_environment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_environment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_environment] = mock_rpc - - request = {} - await client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_environment_async(transport: str = 'grpc_asyncio', request_type=service.GetEnvironmentRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. 
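
The async variants below wrap their return values in
grpc_helpers_async.FakeUnaryUnaryCall because the async client awaits the
stub's result, so a plain response object would not suffice. A stdlib-only
sketch of that requirement, with a plain future standing in for the fake call:

    import asyncio
    from unittest import mock

    async def main():
        call = mock.Mock()
        fake_call = asyncio.get_running_loop().create_future()
        fake_call.set_result("response")  # awaitable stand-in for FakeUnaryUnaryCall
        call.return_value = fake_call
        assert await call(request={}) == "response"

    asyncio.run(main())
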
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment(
- name='name_value',
- display_name='display_name_value',
- uid='uid_value',
- description='description_value',
- state=resources.State.ACTIVE,
- ))
- response = await client.get_environment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetEnvironmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, analyze.Environment)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.uid == 'uid_value'
- assert response.description == 'description_value'
- assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_get_environment_async_from_dict():
- await test_get_environment_async(request_type=dict)
-
-def test_get_environment_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetEnvironmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_environment),
- '__call__') as call:
- call.return_value = analyze.Environment()
- client.get_environment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_environment_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetEnvironmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_environment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
- await client.get_environment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_environment_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_environment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = analyze.Environment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_environment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_environment_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_environment(
- service.GetEnvironmentRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_environment_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_environment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_environment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_environment_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_environment(
- service.GetEnvironmentRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.ListSessionsRequest,
- dict,
-])
-def test_list_sessions(request_type, transport: str = 'grpc'):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListSessionsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = service.ListSessionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSessionsPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_sessions_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
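
A hedged sketch of the AIP-4235 behavior the *_non_empty_request_* tests
guard: request-ID string fields left empty by the caller are auto-filled with
a UUID4 before the RPC is sent. The `request_id` field name below is
illustrative, not taken from these request messages:

    import uuid

    request = {"parent": "parent_value", "request_id": ""}
    if not request["request_id"]:  # auto-populate per AIP-4235
        request["request_id"] = str(uuid.uuid4())
    assert uuid.UUID(request["request_id"]).version == 4
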
- request = service.ListSessionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_sessions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSessionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_sessions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_sessions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc - - request = {} - await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=service.ListSessionsRequest): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListSessionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSessionsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_async_from_dict():
- await test_list_sessions_async(request_type=dict)
-
-def test_list_sessions_field_headers():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListSessionsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- call.return_value = service.ListSessionsResponse()
- client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_field_headers_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListSessionsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
- await client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_sessions_flattened():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListSessionsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_sessions(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_sessions_flattened_error():
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_sessions(
- service.ListSessionsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_sessions(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_error_async():
- client = DataplexServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_sessions(
- service.ListSessionsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_sessions_pager(transport_name: str = "grpc"):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- service.ListSessionsResponse(
- sessions=[
- analyze.Session(),
- analyze.Session(),
- analyze.Session(),
- ],
- next_page_token='abc',
- ),
- service.ListSessionsResponse(
- sessions=[],
- next_page_token='def',
- ),
- service.ListSessionsResponse(
- sessions=[
- analyze.Session(),
- ],
- next_page_token='ghi',
- ),
- service.ListSessionsResponse(
- sessions=[
- analyze.Session(),
- analyze.Session(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, analyze.Session)
- for i in results)
-def test_list_sessions_pages(transport_name: str = "grpc"):
- client = DataplexServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sessions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_sessions_async_pager(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sessions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyze.Session) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_sessions_async_pages(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_sessions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_lake_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_lake] = mock_rpc - - request = {} - client.create_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_lake_rest_required_fields(request_type=service.CreateLakeRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["lake_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "lakeId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "lakeId" in jsonified_request - assert jsonified_request["lakeId"] == request_init["lake_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["lakeId"] = 'lake_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lake._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("lake_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "lakeId" in jsonified_request - assert jsonified_request["lakeId"] == 'lake_id_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_lake(request) - - expected_params = [ - ( - "lakeId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_lake_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_lake._get_unset_required_fields({}) - assert set(unset_fields) == (set(("lakeId", "validateOnly", )) & set(("parent", "lakeId", "lake", ))) - - -def test_create_lake_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_lake(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
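
The URI assertion in the flattened REST tests relies on google.api_core's
path_template, which matches a concrete request URL against the http-rule
template. A small sketch, with an illustrative host and template:

    from google.api_core import path_template

    assert path_template.validate(
        "https://example.com/v1/{parent=projects/*/locations/*}/lakes",
        "https://example.com/v1/projects/p1/locations/l1/lakes",
    )
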
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/lakes" % client.transport._host, args[1]) - - -def test_create_lake_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_lake( - service.CreateLakeRequest(), - parent='parent_value', - lake=resources.Lake(name='name_value'), - lake_id='lake_id_value', - ) - - -def test_update_lake_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_lake] = mock_rpc - - request = {} - client.update_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_lake_rest_required_fields(request_type=service.UpdateLakeRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_lake._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_lake(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_lake_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_lake._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "lake", ))) - - -def test_update_lake_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_lake(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{lake.name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) - - -def test_update_lake_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_lake( - service.UpdateLakeRequest(), - lake=resources.Lake(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_lake_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_lake] = mock_rpc - - request = {} - client.delete_lake(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_lake_rest_required_fields(request_type=service.DeleteLakeRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
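
These required-fields tests hinge on proto3 JSON serialization dropping
default-valued fields, which is why defaults must be re-inserted before
transcoding. A short sketch of that behavior; Operation is used only because
it is a small, well-known message type:

    import json

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.Operation(name="operations/spam", done=False)
    jsonified = json.loads(json_format.MessageToJson(op))
    # done=False is a proto3 default, so it is dropped from the JSON form.
    assert jsonified == {"name": "operations/spam"}
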
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_lake(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_lake_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_lake._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_lake_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_lake(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) - - -def test_delete_lake_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_lake( - service.DeleteLakeRequest(), - name='name_value', - ) - - -def test_list_lakes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_lakes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_lakes] = mock_rpc - - request = {} - client.list_lakes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_lakes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_lakes_rest_required_fields(request_type=service.ListLakesRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lakes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lakes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListLakesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
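The earlier assertion, assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )), is plain set arithmetic: whatever _get_unset_required_fields still reports must come only from ListLakes's optional query parameters, never from a path or body field. In isolation:

allowed = {'filter', 'order_by', 'page_size', 'page_token'}
assert not {'page_size'} - allowed  # a leftover optional field is fine
assert {'parent'} - allowed         # a path parameter here would be a bug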
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListLakesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_lakes(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_lakes_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_lakes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_lakes_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListLakesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListLakesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_lakes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/lakes" % client.transport._host, args[1]) - - -def test_list_lakes_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_lakes( - service.ListLakesRequest(), - parent='parent_value', - ) - - -def test_list_lakes_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - resources.Lake(), - ], - next_page_token='abc', - ), - service.ListLakesResponse( - lakes=[], - next_page_token='def', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - ], - next_page_token='ghi', - ), - service.ListLakesResponse( - lakes=[ - resources.Lake(), - resources.Lake(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListLakesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_lakes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Lake) - for i in results) - - pages = list(client.list_lakes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_lake_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_lake in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_lake] = mock_rpc - - request = {} - client.get_lake(request) - - # Establish that the underlying gRPC stub method was called. 
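The pager test above encodes the iteration contract: keep fetching while next_page_token is non-empty and concatenate the lakes from every page, six in all. A minimal standalone model of that loop, where fetch() is a hypothetical stand-in for one ListLakes round trip:

pages = [
    {'lakes': ['a', 'b', 'c'], 'next_page_token': 'abc'},
    {'lakes': [], 'next_page_token': 'def'},
    {'lakes': ['d'], 'next_page_token': 'ghi'},
    {'lakes': ['e', 'f'], 'next_page_token': ''},
]

def fetch(token):
    index = {'': 0, 'abc': 1, 'def': 2, 'ghi': 3}
    return pages[index[token]]

results, token = [], ''
while True:
    page = fetch(token)
    results.extend(page['lakes'])
    token = page['next_page_token']
    if not token:
        break
assert results == ['a', 'b', 'c', 'd', 'e', 'f']

The doubled tuple in the test (response = response + response) is there because the test makes two full passes, one through list(pager) and one through .pages, and each pass consumes four mocked HTTP responses from req.side_effect.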
- assert mock_rpc.call_count == 1 - - client.get_lake(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_lake_rest_required_fields(request_type=service.GetLakeRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lake._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resources.Lake() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Lake.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_lake(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_lake_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_lake._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_lake_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.Lake() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Lake.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_lake(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*}" % client.transport._host, args[1]) - - -def test_get_lake_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_lake( - service.GetLakeRequest(), - name='name_value', - ) - - -def test_list_lake_actions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_lake_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_lake_actions] = mock_rpc - - request = {} - client.list_lake_actions(request) - - # Establish that the underlying gRPC stub method was called. 
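The *_flattened_error tests above pin down a simple client rule: a call accepts either a complete request object or flattened keyword arguments, never both. A toy guard with the same behaviour, where call() is hypothetical rather than the real client method:

def call(request=None, **flattened):
    if request is not None and flattened:
        raise ValueError('pass a request object or flattened fields, not both')
    return request if request is not None else flattened

assert call(name='n') == {'name': 'n'}
try:
    call({'name': 'n'}, name='n')
    raise AssertionError('expected ValueError')
except ValueError:
    pass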
- assert mock_rpc.call_count == 1 - - client.list_lake_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_lake_actions_rest_required_fields(request_type=service.ListLakeActionsRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lake_actions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_lake_actions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_lake_actions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_lake_actions_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_lake_actions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_lake_actions_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_lake_actions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/actions" % client.transport._host, args[1]) - - -def test_list_lake_actions_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_lake_actions( - service.ListLakeActionsRequest(), - parent='parent_value', - ) - - -def test_list_lake_actions_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListActionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - pager = client.list_lake_actions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) - - pages = list(client.list_lake_actions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_zone_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_zone] = mock_rpc - - request = {} - client.create_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_zone_rest_required_fields(request_type=service.CreateZoneRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["zone_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "zoneId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "zoneId" in jsonified_request - assert jsonified_request["zoneId"] == request_init["zone_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["zoneId"] = 'zone_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_zone._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", "zone_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "zoneId" in jsonified_request - assert jsonified_request["zoneId"] == 'zone_id_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
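One detail worth noticing in the hunk below: CreateZone's expected_params include ('zoneId', ''), so a required field is serialized into the query string even while it still holds its empty default, whereas optional fields at their defaults are dropped entirely. Reduced to its core:

expected = [('zoneId', ''), ('$alt', 'json;enum-encoding=int')]
assert dict(expected)['zoneId'] == ''        # required: sent even when empty
assert 'validateOnly' not in dict(expected)  # optional defaults are dropped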
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_zone(request) - - expected_params = [ - ( - "zoneId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_zone_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_zone._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", "zoneId", )) & set(("parent", "zoneId", "zone", ))) - - -def test_create_zone_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_zone(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/zones" % client.transport._host, args[1]) - - -def test_create_zone_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_zone( - service.CreateZoneRequest(), - parent='parent_value', - zone=resources.Zone(name='name_value'), - zone_id='zone_id_value', - ) - - -def test_update_zone_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_zone] = mock_rpc - - request = {} - client.update_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_zone_rest_required_fields(request_type=service.UpdateZoneRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_zone._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
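The UpdateZone unset-fields assertion below intersects the fields that may travel outside the request body, updateMask and validateOnly, with the required fields, updateMask and zone. Only updateMask sits in both sets, so it is the one required field that can still be reported unset on an empty request. By hand:

may_be_query = {'updateMask', 'validateOnly'}
required = {'updateMask', 'zone'}
assert may_be_query & required == {'updateMask'}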
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_zone(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_zone_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_zone._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "zone", ))) - - -def test_update_zone_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_zone(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{zone.name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1]) - - -def test_update_zone_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_zone( - service.UpdateZoneRequest(), - zone=resources.Zone(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_zone_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_zone] = mock_rpc - - request = {} - client.delete_zone(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_zone_rest_required_fields(request_type=service.DeleteZoneRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_zone(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_zone_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_zone._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_zone_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_zone(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1]) - - -def test_delete_zone_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_zone( - service.DeleteZoneRequest(), - name='name_value', - ) - - -def test_list_zones_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zones in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_zones] = mock_rpc - - request = {} - client.list_zones(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_zones(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_zones_rest_required_fields(request_type=service.ListZonesRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zones._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zones._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListZonesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
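The recurring "fields with default values are dropped" comments describe standard proto3 JSON behaviour: json_format.MessageToJson omits scalar fields still at their defaults, which is exactly why jsonified_request starts out missing the required parent that was initialized to "". A standalone illustration with the Operation message these tests already import:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation()               # every field at its default
assert json_format.MessageToJson(op) == '{}'  # nothing is emitted

op.name = 'operations/spam'
assert '"name"' in json_format.MessageToJson(op)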
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListZonesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_zones(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_zones_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_zones._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_zones_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListZonesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListZonesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_zones(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/zones" % client.transport._host, args[1]) - - -def test_list_zones_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zones( - service.ListZonesRequest(), - parent='parent_value', - ) - - -def test_list_zones_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - resources.Zone(), - ], - next_page_token='abc', - ), - service.ListZonesResponse( - zones=[], - next_page_token='def', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - ], - next_page_token='ghi', - ), - service.ListZonesResponse( - zones=[ - resources.Zone(), - resources.Zone(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListZonesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - pager = client.list_zones(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Zone) - for i in results) - - pages = list(client.list_zones(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_zone_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_zone in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_zone] = mock_rpc - - request = {} - client.get_zone(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_zone(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_zone_rest_required_fields(request_type=service.GetZoneRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_zone._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resources.Zone() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Zone.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_zone(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_zone_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_zone._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_zone_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.Zone() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Zone.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_zone(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*}" % client.transport._host, args[1]) - - -def test_get_zone_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_zone( - service.GetZoneRequest(), - name='name_value', - ) - - -def test_list_zone_actions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_zone_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_zone_actions] = mock_rpc - - request = {} - client.list_zone_actions(request) - - # Establish that the underlying gRPC stub method was called. 
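path_template.validate, used in the flattened test above, checks a concrete URL against a {field=pattern} template once the host has been substituted in. The same call in isolation, with an invented host and resource name:

from google.api_core import path_template

assert path_template.validate(
    'https://dataplex.example.com/v1/{name=projects/*/locations/*/lakes/*/zones/*}',
    'https://dataplex.example.com/v1/projects/p1/locations/l1/lakes/lk1/zones/z1',
)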
- assert mock_rpc.call_count == 1 - - client.list_zone_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_zone_actions_rest_required_fields(request_type=service.ListZoneActionsRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zone_actions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_zone_actions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_zone_actions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_zone_actions_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_zone_actions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_zone_actions_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_zone_actions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/actions" % client.transport._host, args[1]) - - -def test_list_zone_actions_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_zone_actions( - service.ListZoneActionsRequest(), - parent='parent_value', - ) - - -def test_list_zone_actions_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListActionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - pager = client.list_zone_actions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) - - pages = list(client.list_zone_actions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_asset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_asset] = mock_rpc - - request = {} - client.create_asset(request) - - # Establish that the underlying gRPC stub method was called. 
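# Editor's note: the caching assertions around this point rely on the
# transport holding a dict of method -> wrapped callable, populated once at
# construction by _prep_wrapped_messages. A toy version of that contract
# (FakeTransport and this wrap_method are hypothetical stand-ins):
from unittest import mock

def wrap_method(fn):  # stand-in for gapic_v1.method.wrap_method
    return lambda *args, **kwargs: fn(*args, **kwargs)

class FakeTransport:
    def __init__(self):
        self.create_asset = lambda request: "ok"
        # Wrapped exactly once, at construction time.
        self._wrapped_methods = {self.create_asset: wrap_method(self.create_asset)}

transport = FakeTransport()
mock_rpc = mock.Mock(return_value="ok")
transport._wrapped_methods[transport.create_asset] = mock_rpc  # swap in a spy
transport._wrapped_methods[transport.create_asset]({})
transport._wrapped_methods[transport.create_asset]({})
assert mock_rpc.call_count == 2  # the same cached entry served both calls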
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_asset_rest_required_fields(request_type=service.CreateAssetRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["asset_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "assetId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "assetId" in jsonified_request - assert jsonified_request["assetId"] == request_init["asset_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["assetId"] = 'asset_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_asset._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "assetId" in jsonified_request - assert jsonified_request["assetId"] == 'asset_id_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
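# Editor's note: the "assetId" checks above exist because proto3 JSON omits
# fields left at their default value, so a required-but-empty assetId
# disappears from the jsonified request and has to be restored before
# transcoding. A dict-based mimic of that round trip (not the real
# MessageToJson machinery):
request = {"parent": "", "assetId": ""}
jsonified = {k: v for k, v in request.items() if v != ""}  # defaults dropped
assert "assetId" not in jsonified
jsonified.update({"assetId": ""})  # what _get_unset_required_fields restores
assert jsonified["assetId"] == ""  # hence the ("assetId", "") query param below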
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_asset(request) - - expected_params = [ - ( - "assetId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_asset_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_asset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetId", "validateOnly", )) & set(("parent", "assetId", "asset", ))) - - -def test_create_asset_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_asset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets" % client.transport._host, args[1]) - - -def test_create_asset_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
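# Editor's note: the ValueError asserted just below is the standard GAPIC rule
# that a full request object and flattened keyword fields are mutually
# exclusive. A minimal sketch of that guard (this create_asset is a
# hypothetical stand-in, not the generated client method):
import pytest

def create_asset(request=None, *, parent=None, asset=None, asset_id=None):
    if request is not None and any(v is not None for v in (parent, asset, asset_id)):
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")

with pytest.raises(ValueError):
    create_asset({"parent": "p"}, parent="parent_value")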
- with pytest.raises(ValueError): - client.create_asset( - service.CreateAssetRequest(), - parent='parent_value', - asset=resources.Asset(name='name_value'), - asset_id='asset_id_value', - ) - - -def test_update_asset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_asset] = mock_rpc - - request = {} - client.update_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_asset_rest_required_fields(request_type=service.UpdateAssetRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_asset._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
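# Editor's note: the two checks around _get_unset_required_fields reduce to
# plain set algebra: query-eligible fields with defaults, intersected with
# the fields the RPC requires. For update_asset that intersection is
# updateMask alone, which is exactly what
# test_update_asset_rest_unset_required_fields asserts below:
optional_query_params = {"updateMask", "validateOnly"}
required_fields = {"updateMask", "asset"}
assert optional_query_params & required_fields == {"updateMask"}
assert optional_query_params - required_fields == {"validateOnly"}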
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_asset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_asset_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_asset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "asset", ))) - - -def test_update_asset_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}} - - # get truthy value for each flattened field - mock_args = dict( - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_asset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{asset.name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1]) - - -def test_update_asset_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_asset( - service.UpdateAssetRequest(), - asset=resources.Asset(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_asset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_asset] = mock_rpc - - request = {} - client.delete_asset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_asset_rest_required_fields(request_type=service.DeleteAssetRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
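# Editor's note: unlike the create/update variants, the transcode result
# built below carries no 'body' key; DELETE rules put everything in the URI
# and query string. A minimal shape check with plain dicts (not the real
# transcoder output):
transcode_result = {
    "uri": "v1/sample_method",
    "method": "delete",
    "query_params": {"name": "name_value"},
}
assert "body" not in transcode_result
assert transcode_result["method"] == "delete"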
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_asset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_asset_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_asset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_asset_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_asset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1]) - - -def test_delete_asset_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_asset( - service.DeleteAssetRequest(), - name='name_value', - ) - - -def test_list_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_assets_rest_required_fields(request_type=service.ListAssetsRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListAssetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_assets(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_assets_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_assets_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListAssetsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_assets(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/assets" % client.transport._host, args[1]) - - -def test_list_assets_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_assets( - service.ListAssetsRequest(), - parent='parent_value', - ) - - -def test_list_assets_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - resources.Asset(), - ], - next_page_token='abc', - ), - service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - ], - next_page_token='ghi', - ), - service.ListAssetsResponse( - assets=[ - resources.Asset(), - resources.Asset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListAssetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - pager = client.list_assets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Asset) - for i in results) - - pages = list(client.list_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_asset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_asset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_asset] = mock_rpc - - request = {} - client.get_asset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_asset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_asset_rest_required_fields(request_type=service.GetAssetRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_asset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resources.Asset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Asset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_asset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_asset_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_asset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_asset_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
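# Editor's note: the flattened tests below verify the final URL against the
# method's HTTP rule with path_template.validate. The same call in isolation,
# with an illustrative template and path:
from google.api_core import path_template

assert path_template.validate(
    "v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}",
    "v1/projects/p1/locations/l1/lakes/lk1/zones/z1/assets/a1",
)
assert not path_template.validate(
    "v1/{name=projects/*/locations/*}",
    "v1/projects/p1/locations/l1/lakes/lk1",  # extra segments do not match
)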
- return_value = resources.Asset() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Asset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_asset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/assets/*}" % client.transport._host, args[1]) - - -def test_get_asset_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_asset( - service.GetAssetRequest(), - name='name_value', - ) - - -def test_list_asset_actions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_asset_actions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_asset_actions] = mock_rpc - - request = {} - client.list_asset_actions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_asset_actions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_asset_actions_rest_required_fields(request_type=service.ListAssetActionsRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_asset_actions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_asset_actions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
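# Editor's note: the required "parent" travels in the URI under this method's
# HTTP rule rather than as a query parameter; only the optional paging fields
# may remain in the query string. A sketch of that fold using
# path_template.expand (resource names are illustrative):
from google.api_core import path_template

uri = path_template.expand(
    "v1/{parent=projects/*/locations/*/lakes/*/zones/*/assets/*}/actions",
    parent="projects/p1/locations/l1/lakes/lk1/zones/z1/assets/a1",
)
assert uri == "v1/projects/p1/locations/l1/lakes/lk1/zones/z1/assets/a1/actions"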
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_asset_actions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_asset_actions_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_asset_actions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_asset_actions_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_asset_actions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/assets/*}/actions" % client.transport._host, args[1]) - - -def test_list_asset_actions_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_asset_actions( - service.ListAssetActionsRequest(), - parent='parent_value', - ) - - -def test_list_asset_actions_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - resources.Action(), - ], - next_page_token='abc', - ), - service.ListActionsResponse( - actions=[], - next_page_token='def', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - ], - next_page_token='ghi', - ), - service.ListActionsResponse( - actions=[ - resources.Action(), - resources.Action(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListActionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - - pager = client.list_asset_actions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Action) - for i in results) - - pages = list(client.list_asset_actions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_task_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_task] = mock_rpc - - request = {} - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_task_rest_required_fields(request_type=service.CreateTaskRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["task_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "taskId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "taskId" in jsonified_request - assert jsonified_request["taskId"] == request_init["task_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["taskId"] = 'task_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_task._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("task_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "taskId" in jsonified_request - assert jsonified_request["taskId"] == 'task_id_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
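# Editor's note: create_task is a long-running method, so the fake response
# below is a serialized google.longrunning Operation rather than a Task. The
# JSON round trip the test leans on, shown in isolation:
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="operations/spam")
payload = json_format.MessageToJson(op)
assert json_format.Parse(payload, operations_pb2.Operation()).name == "operations/spam"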
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_task(request) - - expected_params = [ - ( - "taskId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_task_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_task._get_unset_required_fields({}) - assert set(unset_fields) == (set(("taskId", "validateOnly", )) & set(("parent", "taskId", "task", ))) - - -def test_create_task_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_task(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/tasks" % client.transport._host, args[1]) - - -def test_create_task_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_task( - service.CreateTaskRequest(), - parent='parent_value', - task=tasks.Task(name='name_value'), - task_id='task_id_value', - ) - - -def test_update_task_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_task] = mock_rpc - - request = {} - client.update_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_task_rest_required_fields(request_type=service.UpdateTaskRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_task._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_task(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_task_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.update_task._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "task", )))
-
-
-def test_update_task_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            task=tasks.Task(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_task(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{task.name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1])
-
-
-def test_update_task_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_task(
-            service.UpdateTaskRequest(),
-            task=tasks.Task(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_delete_task_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_task in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_task] = mock_rpc
-
-        request = {}
-        client.delete_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_task_rest_required_fields(request_type=service.DeleteTaskRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_task(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_task_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_task._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_task_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_task(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1])
-
-
-def test_delete_task_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_task(
-            service.DeleteTaskRequest(),
-            name='name_value',
-        )
-
-
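The *_use_cached_wrapped_rpc tests above and below all pin the same behavior: the client wraps each transport method once, at construction time, and every later call reuses the cached wrapper. A minimal sketch of that memoization pattern (illustrative only; `wrap_method` and `Transport` here are stand-ins, not the gapic implementation):

    # Stand-in for google.api_core.gapic_v1.method.wrap_method, which layers
    # retry/timeout handling around a bare transport callable.
    def wrap_method(fn):
        def wrapped(request, **kwargs):
            return fn(request, **kwargs)
        return wrapped

    class Transport:
        def __init__(self):
            # Built once at client creation, mirroring what
            # _prep_wrapped_messages does for the generated clients.
            self._wrapped_methods = {self.list_tasks: wrap_method(self.list_tasks)}

        def list_tasks(self, request):
            return ["task-1", "task-2"]

    transport = Transport()
    # Repeated lookups return the same cached wrapper; nothing is re-wrapped.
    assert transport._wrapped_methods[transport.list_tasks] is transport._wrapped_methods[transport.list_tasks]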
-def test_list_tasks_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_tasks in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc
-
-        request = {}
-        client.list_tasks(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_tasks(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_tasks_rest_required_fields(request_type=service.ListTasksRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = service.ListTasksResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = service.ListTasksResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_tasks(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_tasks_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_tasks._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_tasks_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListTasksResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.ListTasksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_tasks(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/tasks" % client.transport._host, args[1])
-
-
-def test_list_tasks_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_tasks(
-            service.ListTasksRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_tasks_rest_pager(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                    tasks.Task(),
-                    tasks.Task(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListTasksResponse(
-                tasks=[],
-                next_page_token='def',
-            ),
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListTasksResponse(
-                tasks=[
-                    tasks.Task(),
-                    tasks.Task(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListTasksResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
-        pager = client.list_tasks(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, tasks.Task)
-                for i in results)
-
-        pages = list(client.list_tasks(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
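The pager test above drives `client.list_tasks` through four fake pages and expects six results in total. The page-stitching loop it exercises can be sketched like this (dicts stand in for the real ListTasksResponse messages):

    pages = {
        "": {"tasks": ["t1", "t2", "t3"], "next_page_token": "abc"},
        "abc": {"tasks": [], "next_page_token": "def"},
        "def": {"tasks": ["t4"], "next_page_token": "ghi"},
        "ghi": {"tasks": ["t5", "t6"], "next_page_token": ""},
    }

    def iterate(fetch_page):
        token = ""
        while True:
            page = fetch_page(token)
            yield from page["tasks"]
            token = page["next_page_token"]
            if not token:  # an empty token marks the final page
                break

    # Six items across all pages, matching the count the test asserts.
    assert len(list(iterate(lambda token: pages[token]))) == 6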
-def test_get_task_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_task in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_task] = mock_rpc
-
-        request = {}
-        client.get_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_task_rest_required_fields(request_type=service.GetTaskRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = tasks.Task()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = tasks.Task.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_task(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_task_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_task._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_task_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = tasks.Task()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = tasks.Task.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_task(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}" % client.transport._host, args[1])
-
-
-def test_get_task_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_task(
-            service.GetTaskRequest(),
-            name='name_value',
-        )
-
-
-def test_list_jobs_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_jobs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc
-
-        request = {}
-        client.list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_jobs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_jobs_rest_required_fields(request_type=service.ListJobsRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = service.ListJobsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = service.ListJobsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_jobs(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_jobs_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_jobs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
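The `_unset_required_fields` assertions above all follow one shape: intersect the method's optional query/body parameters with its required fields, leaving only the required fields a caller must still populate. Two worked instances with plain sets, mirroring the assertions for list_jobs and update_task:

    # list_jobs: no required field doubles as a query parameter, so the
    # intersection is empty and nothing is reported as unset.
    assert {"pageSize", "pageToken"} & {"parent"} == set()

    # update_task: "updateMask" appears on both sides, so it is the one
    # field the transport still expects the caller to provide.
    assert {"updateMask", "validateOnly"} & {"updateMask", "task"} == {"updateMask"}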
-def test_list_jobs_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListJobsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_jobs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/tasks/*}/jobs" % client.transport._host, args[1])
-
-
-def test_list_jobs_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_jobs(
-            service.ListJobsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_jobs_rest_pager(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
-        #with mock.patch.object(path_template, 'transcode') as transcode:
-        # Set the response as a series of pages
-        response = (
-            service.ListJobsResponse(
-                jobs=[
-                    tasks.Job(),
-                    tasks.Job(),
-                    tasks.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            service.ListJobsResponse(
-                jobs=[
-                    tasks.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListJobsResponse(
-                jobs=[
-                    tasks.Job(),
-                    tasks.Job(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListJobsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-
-        pager = client.list_jobs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, tasks.Job)
-                for i in results)
-
-        pages = list(client.list_jobs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_run_task_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.run_task in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.run_task] = mock_rpc
-
-        request = {}
-        client.run_task(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.run_task(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_run_task_rest_required_fields(request_type=service.RunTaskRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_task._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = service.RunTaskResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = service.RunTaskResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.run_task(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_run_task_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.run_task._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
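The required-fields tests stub out `path_template.transcode`, which normally maps a request message plus its http rule onto an HTTP call. The stub returns the same dict shape the real helper produces; a sketch of how a REST transport might consume it (illustrative only; the actual consumption happens inside the generated transport classes):

    transcode_result = {
        'uri': 'v1/sample_method',
        'method': "post",
        'query_params': {'name': 'name_value'},
        'body': {},  # present only for methods that carry an http body
    }

    def build_http_call(result, host="https://dataplex.googleapis.com"):
        url = "%s/%s" % (host, result['uri'])
        kwargs = {'params': result['query_params']}
        if 'body' in result:
            kwargs['data'] = result['body']
        return result['method'].upper(), url, kwargs

    method, url, kwargs = build_http_call(transcode_result)
    assert method == "POST" and url.endswith("v1/sample_method")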
-def test_run_task_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.RunTaskResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.RunTaskResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.run_task(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*}:run" % client.transport._host, args[1])
-
-
-def test_run_task_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.run_task(
-            service.RunTaskRequest(),
-            name='name_value',
-        )
-
-
-def test_get_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_job] = mock_rpc
-
-        request = {}
-        client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_job_rest_required_fields(request_type=service.GetJobRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = tasks.Job()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = tasks.Job.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_job_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.get_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_job_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = tasks.Job()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = tasks.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}" % client.transport._host, args[1])
-
-
-def test_get_job_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_job(
-            service.GetJobRequest(),
-            name='name_value',
-        )
-
-
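Every `*_flattened_error` test pins the same contract: a method accepts either a fully formed request object or flattened keyword arguments, never both. A minimal sketch of that guard (the generated clients implement it per method; this stand-in uses a dict for the request):

    def get_job(request=None, *, name=None):
        if request is not None and name is not None:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        return request if request is not None else {"name": name}

    assert get_job(name="name_value") == {"name": "name_value"}
    try:
        get_job(request={"name": "n"}, name="name_value")
    except ValueError:
        pass  # mixing both calling styles is rejected, as the tests assert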
-def test_cancel_job_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.cancel_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc
-
-        request = {}
-        client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.cancel_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_cancel_job_rest_required_fields(request_type=service.CancelJobRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = None
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.cancel_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_cancel_job_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.cancel_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_cancel_job_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.cancel_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/tasks/*/jobs/*}:cancel" % client.transport._host, args[1])
-
-
-def test_cancel_job_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.cancel_job(
-            service.CancelJobRequest(),
-            name='name_value',
-        )
-
-
-def test_create_environment_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_environment in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_environment] = mock_rpc
-
-        request = {}
-        client.create_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_environment(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_environment_rest_required_fields(request_type=service.CreateEnvironmentRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["environment_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "environmentId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_environment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "environmentId" in jsonified_request
-    assert jsonified_request["environmentId"] == request_init["environment_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["environmentId"] = 'environment_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_environment._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("environment_id", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "environmentId" in jsonified_request
-    assert jsonified_request["environmentId"] == 'environment_id_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_environment(request)
-
-            expected_params = [
-                (
-                    "environmentId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_environment_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.create_environment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("environmentId", "validateOnly", )) & set(("parent", "environmentId", "environment", )))
-
-
-def test_create_environment_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            environment=analyze.Environment(name='name_value'),
-            environment_id='environment_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_environment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/environments" % client.transport._host, args[1])
-
-
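Note the expected_params check in test_create_environment_rest_required_fields above: `environment_id` is a required field that travels as a query parameter, so it is emitted even while it still holds its default empty value, alongside the `$alt` setting every REST call carries. Reduced to plain data, the idea is:

    # Required query params survive serialization even at their defaults,
    # which is why the test expects ("environmentId", "") in the params.
    required_query_params = {"environmentId": ""}
    params = list(required_query_params.items()) + [("$alt", "json;enum-encoding=int")]
    assert ("environmentId", "") in params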
-def test_create_environment_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_environment(
-            service.CreateEnvironmentRequest(),
-            parent='parent_value',
-            environment=analyze.Environment(name='name_value'),
-            environment_id='environment_id_value',
-        )
-
-
-def test_update_environment_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_environment in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_environment] = mock_rpc
-
-        request = {}
-        client.update_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_environment(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_environment_rest_required_fields(request_type=service.UpdateEnvironmentRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_environment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_environment._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_environment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_environment_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.update_environment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("updateMask", "environment", )))
-
-
-def test_update_environment_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            environment=analyze.Environment(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_environment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{environment.name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1])
-
-
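The flattened update helpers pair the resource with a FieldMask naming the paths to write, and the test above checks the call is routed to the `{environment.name=...}` URI. For comparison, a real (non-mocked) call would look roughly like this; it needs live credentials, and the resource name and mask path below are placeholder values:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.DataplexServiceClient()
    environment = dataplex_v1.Environment(
        name="projects/sample1/locations/sample2/lakes/sample3/environments/sample4"
    )
    # Only the fields named in the mask are written; others are left untouched.
    operation = client.update_environment(
        environment=environment,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    response = operation.result()  # update_environment is a long-running operation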
-def test_update_environment_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_environment(
-            service.UpdateEnvironmentRequest(),
-            environment=analyze.Environment(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_delete_environment_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_environment in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_environment] = mock_rpc
-
-        request = {}
-        client.delete_environment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_environment(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_environment_rest_required_fields(request_type=service.DeleteEnvironmentRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_environment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_environment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_environment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_environment_rest_unset_required_fields():
-    transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.delete_environment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_environment_rest_flattened():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_environment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1])
-
-
-def test_delete_environment_rest_flattened_error(transport: str = 'rest'):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_environment(
-            service.DeleteEnvironmentRequest(),
-            name='name_value',
-        )
-
-
-def test_list_environments_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataplexServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_environments in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_environments] = mock_rpc
-
-        request = {}
-        client.list_environments(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_environments(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_environments_rest_required_fields(request_type=service.ListEnvironmentsRequest):
-    transport_class = transports.DataplexServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_environments._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_environments._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = service.ListEnvironmentsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListEnvironmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_environments(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_environments_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_environments._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_environments_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListEnvironmentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListEnvironmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_environments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*}/environments" % client.transport._host, args[1]) - - -def test_list_environments_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_environments( - service.ListEnvironmentsRequest(), - parent='parent_value', - ) - - -def test_list_environments_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - analyze.Environment(), - ], - next_page_token='abc', - ), - service.ListEnvironmentsResponse( - environments=[], - next_page_token='def', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - ], - next_page_token='ghi', - ), - service.ListEnvironmentsResponse( - environments=[ - analyze.Environment(), - analyze.Environment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListEnvironmentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - - pager = client.list_environments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Environment) - for i in results) - - pages = list(client.list_environments(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_environment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_environment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_environment] = mock_rpc - - request = {} - client.get_environment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_environment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_environment_rest_required_fields(request_type=service.GetEnvironmentRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_environment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_environment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = analyze.Environment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analyze.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_environment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_environment_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_environment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_environment_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = analyze.Environment() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analyze.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_environment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/environments/*}" % client.transport._host, args[1]) - - -def test_get_environment_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_environment( - service.GetEnvironmentRequest(), - name='name_value', - ) - - -def test_list_sessions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_sessions_rest_required_fields(request_type=service.ListSessionsRequest): - transport_class = transports.DataplexServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListSessionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_sessions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_sessions_rest_unset_required_fields(): - transport = transports.DataplexServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_sessions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_sessions_rest_flattened(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListSessionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_sessions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/environments/*}/sessions" % client.transport._host, args[1]) - - -def test_list_sessions_rest_flattened_error(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sessions( - service.ListSessionsRequest(), - parent='parent_value', - ) - - -def test_list_sessions_rest_pager(transport: str = 'rest'): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - analyze.Session(), - ], - next_page_token='abc', - ), - service.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - ], - next_page_token='ghi', - ), - service.ListSessionsResponse( - sessions=[ - analyze.Session(), - analyze.Session(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListSessionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'} - - pager = client.list_sessions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyze.Session) - for i in results) - - pages = list(client.list_sessions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataplexServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataplexServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DataplexServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataplexServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, - transports.DataplexServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataplexServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_lakes_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - call.return_value = service.ListLakesResponse() - client.list_lakes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lake_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - call.return_value = resources.Lake() - client.get_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_lake_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_lake_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakeActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_zones_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - call.return_value = service.ListZonesResponse() - client.list_zones(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZonesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_zone_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - call.return_value = resources.Zone() - client.get_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_zone_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_zone_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZoneActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_asset(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = service.ListAssetsResponse() - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_asset_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - call.return_value = resources.Asset() - client.get_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_asset_actions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - call.return_value = service.ListActionsResponse() - client.list_asset_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tasks_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = service.ListTasksResponse() - client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = tasks.Task() - client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = service.ListJobsResponse() - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_run_task_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = service.RunTaskResponse() - client.run_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.RunTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = tasks.Job() - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = None - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_environments_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - call.return_value = service.ListEnvironmentsResponse() - client.list_environments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListEnvironmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_environment_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - call.return_value = analyze.Environment() - client.get_environment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_sessions_empty_call_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value = service.ListSessionsResponse() - client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListSessionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataplexServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lake), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lakes_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lakes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListLakesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_lakes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_lake_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lake), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Lake( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - service_account='service_account_value', - )) - await client.get_lake(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetLakeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lake_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lake_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_lake_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListLakeActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_zones_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zones), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListZonesResponse( - next_page_token='next_page_token_value', - )) - await client.list_zones(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZonesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_zone_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_zone), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - )) - await client.get_zone(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetZoneRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_zone_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_zone_actions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_zone_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListZoneActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_asset(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_assets_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_asset_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_asset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - )) - await client.get_asset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAssetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_asset_actions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_asset_actions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListActionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_asset_actions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListAssetActionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tasks_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) - await client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Task( - name='name_value', - uid='uid_value', - description='description_value', - display_name='display_name_value', - state=resources.State.ACTIVE, - )) - await client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_jobs_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListJobsResponse( - next_page_token='next_page_token_value', - )) - await client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_run_task_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.RunTaskResponse( - )) - await client.run_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.RunTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_job_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tasks.Job( - name='name_value', - uid='uid_value', - state=tasks.Job.State.RUNNING, - retry_count=1214, - service=tasks.Job.Service.DATAPROC, - service_job='service_job_value', - message='message_value', - trigger=tasks.Job.Trigger.TASK_CONFIG, - )) - await client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_job_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_environments_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_environments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListEnvironmentsResponse( - next_page_token='next_page_token_value', - )) - await client.list_environments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListEnvironmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_environment_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_environment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - )) - await client.get_environment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetEnvironmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_sessions_empty_call_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListSessionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_sessions(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListSessionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DataplexServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_lake_rest_bad_request(request_type=service.CreateLakeRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_lake(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateLakeRequest, - dict, -]) -def test_create_lake_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["lake"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'service_account': 'service_account_value', 'metastore': {'service': 'service_value'}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}, 'metastore_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'endpoint': 'endpoint_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateLakeRequest.meta.fields["lake"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["lake"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["lake"][field])): - del request_init["lake"][field][i][subfield] - else: - del request_init["lake"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_lake(request) - - # Establish that the response is the type that we expect. 
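The pruning loop above protects these tests from schema drift: request_init is a sample captured at generation time, so any subfield the runtime proto no longer declares must be deleted before the request message is built. A condensed, runnable sketch of that step; prune_unknown_subfields and known are illustrative names, not from the generator:

def prune_unknown_subfields(payload: dict, known: set) -> dict:
    # Walk each top-level field; for message-typed values (a dict, or a
    # list of dicts), drop any subfield the runtime schema does not declare.
    for field, value in payload.items():
        items = value if isinstance(value, list) else [value]
        for item in items:
            if isinstance(item, dict):
                for subfield in list(item):
                    if (field, subfield) not in known:
                        del item[subfield]
    return payload


sample = {"metastore": {"service": "svc", "removed_in_runtime": 1}}
assert prune_unknown_subfields(sample, {("metastore", "service")}) == {
    "metastore": {"service": "svc"}
}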
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_lake_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_lake") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_lake_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_lake") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateLakeRequest.pb(service.CreateLakeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.CreateLakeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_lake_rest_bad_request(request_type=service.UpdateLakeRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
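Interceptor tests such as test_create_lake_rest_interceptors above exercise a three-hook contract: pre_* may rewrite the request and metadata before the HTTP call, while post_* and post_*_with_metadata may rewrite what the caller receives. A minimal sketch of that flow, using illustrative names (LoggingInterceptor, invoke) rather than the generated interceptor API:

class LoggingInterceptor:
    # pre_call may rewrite the request and metadata before the transport
    # runs; post_call may rewrite the response and metadata afterwards.
    def pre_call(self, request, metadata):
        return request, metadata + [("traced", "yes")]

    def post_call(self, response, metadata):
        return response, metadata


def invoke(interceptor, request, metadata, transport):
    request, metadata = interceptor.pre_call(request, metadata)
    response = transport(request, metadata)
    return interceptor.post_call(response, metadata)


response, metadata = invoke(
    LoggingInterceptor(),
    request={"name": "projects/p/locations/l/lakes/lake"},
    metadata=[("key", "val")],
    transport=lambda req, md: {"done": True},
)
assert response == {"done": True}
assert ("traced", "yes") in metadata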
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_lake(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateLakeRequest, - dict, -]) -def test_update_lake_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'lake': {'name': 'projects/sample1/locations/sample2/lakes/sample3'}} - request_init["lake"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'service_account': 'service_account_value', 'metastore': {'service': 'service_value'}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}, 'metastore_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'endpoint': 'endpoint_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateLakeRequest.meta.fields["lake"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["lake"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["lake"][field])): - del request_init["lake"][field][i][subfield] - else: - del request_init["lake"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_lake(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_lake_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_lake") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_lake_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_lake") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateLakeRequest.pb(service.UpdateLakeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.UpdateLakeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_lake_rest_bad_request(request_type=service.DeleteLakeRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_lake(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteLakeRequest, - dict, -]) -def test_delete_lake_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_lake(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_lake_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_lake") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_lake_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_lake") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteLakeRequest.pb(service.DeleteLakeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteLakeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_lakes_rest_bad_request(request_type=service.ListLakesRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
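Each *_rest_bad_request test, including the one continuing below, follows the same recipe: patch requests.Session.request, hand back a mocked response with status_code 400, and assert the client surfaces core_exceptions.BadRequest. A stand-alone sketch under those assumptions; call_api, the URL, and the local BadRequest class are illustrative, not the library's:

from unittest import mock

import pytest


class BadRequest(Exception):
    # Illustrative local stand-in for google.api_core.exceptions.BadRequest.
    pass


def call_api(session):
    response = session.request("GET", "https://example.com/v1/lakes")
    if response.status_code == 400:
        raise BadRequest(response.json())
    return response


def test_bad_request_surfaces():
    session = mock.Mock()
    response = mock.Mock(status_code=400)
    response.json = mock.Mock(return_value={})
    session.request.return_value = response
    with pytest.raises(BadRequest):
        call_api(session)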
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_lakes(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListLakesRequest, - dict, -]) -def test_list_lakes_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListLakesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListLakesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_lakes(request) - - # Establish that the response is the type that we expect. 
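The assertions that follow check a pager, not a raw response: ListLakesPager proxies fields such as next_page_token from the first page while fetching later pages on demand. A toy pager showing the behavior those assertions rely on; Page, Pager, and the fetch callable are illustrative, not the generated pagers module:

class Page:
    def __init__(self, items, next_page_token=""):
        self.items = items
        self.next_page_token = next_page_token


class Pager:
    # Proxies next_page_token from the current page and lazily fetches
    # subsequent pages while iterating, mirroring the pager assertions.
    def __init__(self, fetch, first_page):
        self._fetch = fetch
        self._page = first_page

    @property
    def next_page_token(self):
        return self._page.next_page_token

    def __iter__(self):
        while True:
            yield from self._page.items
            if not self._page.next_page_token:
                return
            self._page = self._fetch(self._page.next_page_token)


pager = Pager(lambda token: Page(["lake-2"]), Page(["lake-1"], "tok"))
assert pager.next_page_token == "tok"
assert list(pager) == ["lake-1", "lake-2"]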
- assert isinstance(response, pagers.ListLakesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_lakes_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lakes") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lakes_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_lakes") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListLakesRequest.pb(service.ListLakesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListLakesResponse.to_json(service.ListLakesResponse()) - req.return_value.content = return_value - - request = service.ListLakesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListLakesResponse() - post_with_metadata.return_value = service.ListLakesResponse(), metadata - - client.list_lakes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_lake_rest_bad_request(request_type=service.GetLakeRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_lake(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetLakeRequest, - dict, -]) -def test_get_lake_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = resources.Lake( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - service_account='service_account_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Lake.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_lake(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Lake) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.service_account == 'service_account_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_lake_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_lake") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_lake_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_lake") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetLakeRequest.pb(service.GetLakeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.Lake.to_json(resources.Lake()) - req.return_value.content = return_value - - request = service.GetLakeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = resources.Lake() - post_with_metadata.return_value = resources.Lake(), metadata - - client.get_lake(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_lake_actions_rest_bad_request(request_type=service.ListLakeActionsRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_lake_actions(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListLakeActionsRequest, - dict, -]) -def test_list_lake_actions_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_lake_actions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLakeActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_lake_actions_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lake_actions") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_lake_actions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_lake_actions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListLakeActionsRequest.pb(service.ListLakeActionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListActionsResponse.to_json(service.ListActionsResponse()) - req.return_value.content = return_value - - request = service.ListLakeActionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListActionsResponse() - 
post_with_metadata.return_value = service.ListActionsResponse(), metadata - - client.list_lake_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_zone_rest_bad_request(request_type=service.CreateZoneRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_zone(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateZoneRequest, - dict, -]) -def test_create_zone_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request_init["zone"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'type_': 1, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'resource_spec': {'location_type': 1}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateZoneRequest.meta.fields["zone"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["zone"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["zone"][field])): - del request_init["zone"][field][i][subfield] - else: - del request_init["zone"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_zone(request) - - # Establish that the response is the type that we expect. 
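create_zone is a long-running operation, so the raw REST payload is a google.longrunning Operation rather than a typed Zone, and the check below is a JSON round-trip of the mocked Operation instead of field-level asserts. In real use the returned google.api_core.operation.Operation is resolved to the typed resource; a minimal sketch, assuming an unmocked client:

lro = client.create_zone(request=request)
zone = lro.result()  # polls until the operation completes, then unpacks the Zone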
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_zone_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_zone") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_zone_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_zone") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateZoneRequest.pb(service.CreateZoneRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.CreateZoneRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_zone_rest_bad_request(request_type=service.UpdateZoneRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
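The mock sets exactly the attributes google-api-core needs to translate an HTTP error into an exception (status_code, a json() callable, and request), so a 400 surfaces as BadRequest. The status-to-exception mapping in isolation:

from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(400, "bad request")
assert isinstance(exc, core_exceptions.BadRequest)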
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_zone(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateZoneRequest, - dict, -]) -def test_update_zone_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'zone': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'}} - request_init["zone"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'type_': 1, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'resource_spec': {'location_type': 1}, 'asset_status': {'update_time': {}, 'active_assets': 1390, 'security_policy_applying_assets': 3356}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateZoneRequest.meta.fields["zone"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["zone"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["zone"][field])): - del request_init["zone"][field][i][subfield] - else: - del request_init["zone"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_zone(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_zone_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_zone") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_zone_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_zone") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateZoneRequest.pb(service.UpdateZoneRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.UpdateZoneRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_zone_rest_bad_request(request_type=service.DeleteZoneRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_zone(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteZoneRequest, - dict, -]) -def test_delete_zone_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
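For a long-running delete, the fabricated wire payload is simply an Operation serialized to JSON, which approximates what a real server returns before the operation completes. The same construction in isolation:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name='operations/spam')
body = json_format.MessageToJson(op)  # the JSON body the mocked server hands back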
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_zone(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_zone_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_zone") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_zone_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_zone") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteZoneRequest.pb(service.DeleteZoneRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteZoneRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_zones_rest_bad_request(request_type=service.ListZonesRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_zones(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListZonesRequest, - dict, -]) -def test_list_zones_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListZonesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListZonesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_zones(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListZonesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_zones_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zones") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zones_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_zones") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListZonesRequest.pb(service.ListZonesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListZonesResponse.to_json(service.ListZonesResponse()) - req.return_value.content = return_value - - request = service.ListZonesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListZonesResponse() - post_with_metadata.return_value = service.ListZonesResponse(), metadata - - client.list_zones(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_zone_rest_bad_request(request_type=service.GetZoneRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_zone(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetZoneRequest, - dict, -]) -def test_get_zone_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = resources.Zone( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - type_=resources.Zone.Type.RAW, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Zone.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_zone(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Zone) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - assert response.type_ == resources.Zone.Type.RAW - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_zone_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_zone") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_zone_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_zone") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetZoneRequest.pb(service.GetZoneRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.Zone.to_json(resources.Zone()) - req.return_value.content = return_value - - request = service.GetZoneRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = resources.Zone() - post_with_metadata.return_value = resources.Zone(), metadata - - client.get_zone(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_zone_actions_rest_bad_request(request_type=service.ListZoneActionsRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_zone_actions(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListZoneActionsRequest, - dict, -]) -def test_list_zone_actions_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListActionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListActionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_zone_actions(request) - - # Establish that the response is the type that we expect. 
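list_zone_actions returns a pager rather than the raw ListActionsResponse: the pager is an iterable that keeps issuing follow-up requests while next_page_token is set. A minimal usage sketch, assuming an unmocked client:

pager = client.list_zone_actions(request=request)
for action in pager:
    # items are yielded across page boundaries automatically
    ...
for page in pager.pages:
    # or consume one ListActionsResponse page at a time
    ...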
- assert isinstance(response, pagers.ListZoneActionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_zone_actions_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zone_actions") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_zone_actions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_zone_actions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListZoneActionsRequest.pb(service.ListZoneActionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListActionsResponse.to_json(service.ListActionsResponse()) - req.return_value.content = return_value - - request = service.ListZoneActionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListActionsResponse() - post_with_metadata.return_value = service.ListActionsResponse(), metadata - - client.list_zone_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_asset_rest_bad_request(request_type=service.CreateAssetRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_asset(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateAssetRequest, - dict, -]) -def test_create_asset_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request_init["asset"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'resource_spec': {'name': 'name_value', 'type_': 1, 'read_access_mode': 1}, 'resource_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'managed_access_identity': 'managed_access_identity_value'}, 'security_status': {'state': 1, 'message': 'message_value', 'update_time': {}}, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'discovery_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'last_run_time': {}, 'stats': {'data_items': 1051, 'data_size': 948, 'tables': 635, 'filesets': 863}, 'last_run_duration': {'seconds': 751, 'nanos': 543}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateAssetRequest.meta.fields["asset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["asset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["asset"][field])): - del request_init["asset"][field][i][subfield] - else: - del request_init["asset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_asset(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_asset_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_asset") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_asset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_asset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateAssetRequest.pb(service.CreateAssetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.CreateAssetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_asset_rest_bad_request(request_type=service.UpdateAssetRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_asset(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateAssetRequest, - dict, -]) -def test_update_asset_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'asset': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}} - request_init["asset"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'resource_spec': {'name': 'name_value', 'type_': 1, 'read_access_mode': 1}, 'resource_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'managed_access_identity': 'managed_access_identity_value'}, 'security_status': {'state': 1, 'message': 'message_value', 'update_time': {}}, 'discovery_spec': {'enabled': True, 'include_patterns': ['include_patterns_value1', 'include_patterns_value2'], 'exclude_patterns': ['exclude_patterns_value1', 'exclude_patterns_value2'], 'csv_options': {'header_rows': 1171, 'delimiter': 'delimiter_value', 'encoding': 'encoding_value', 'disable_type_inference': True}, 'json_options': {'encoding': 'encoding_value', 'disable_type_inference': True}, 'schedule': 'schedule_value'}, 'discovery_status': {'state': 1, 'message': 'message_value', 'update_time': {}, 'last_run_time': {}, 'stats': {'data_items': 1051, 'data_size': 948, 'tables': 635, 'filesets': 863}, 'last_run_duration': {'seconds': 751, 'nanos': 543}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateAssetRequest.meta.fields["asset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["asset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["asset"][field])): - del request_init["asset"][field][i][subfield] - else: - del request_init["asset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_asset(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_asset_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_asset") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_asset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_asset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateAssetRequest.pb(service.UpdateAssetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.UpdateAssetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_asset_rest_bad_request(request_type=service.DeleteAssetRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_asset(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteAssetRequest, - dict, -]) -def test_delete_asset_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_asset(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_asset_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_asset") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_asset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_asset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteAssetRequest.pb(service.DeleteAssetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteAssetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_assets_rest_bad_request(request_type=service.ListAssetsRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assets(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListAssetsRequest, - dict, -]) -def test_list_assets_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListAssetsRequest.pb(service.ListAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListAssetsResponse.to_json(service.ListAssetsResponse()) - req.return_value.content = return_value - - request = service.ListAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListAssetsResponse() - post_with_metadata.return_value = service.ListAssetsResponse(), metadata - - 
client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_asset_rest_bad_request(request_type=service.GetAssetRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_asset(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetAssetRequest, - dict, -]) -def test_get_asset_rest_call_success(request_type): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = resources.Asset( - name='name_value', - display_name='display_name_value', - uid='uid_value', - description='description_value', - state=resources.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Asset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_asset(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Asset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.description == 'description_value' - assert response.state == resources.State.ACTIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_asset_rest_interceptors(null_interceptor): - transport = transports.DataplexServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(), - ) - client = DataplexServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_asset") as post, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_asset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_asset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetAssetRequest.pb(service.GetAssetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.Asset.to_json(resources.Asset()) - req.return_value.content = return_value - - request = service.GetAssetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = resources.Asset() - post_with_metadata.return_value = resources.Asset(), metadata - - client.get_asset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_asset_actions_rest_bad_request(request_type=service.ListAssetActionsRequest): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
-
-
-def test_list_asset_actions_rest_bad_request(request_type=service.ListAssetActionsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_asset_actions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListAssetActionsRequest,
-    dict,
-])
-def test_list_asset_actions_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/assets/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListActionsResponse(
-            next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListActionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_asset_actions(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAssetActionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_asset_actions_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_asset_actions") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_asset_actions_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_asset_actions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ListAssetActionsRequest.pb(service.ListAssetActionsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.ListActionsResponse.to_json(service.ListActionsResponse())
-        req.return_value.content = return_value
-
-        request = service.ListAssetActionsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.ListActionsResponse()
-        post_with_metadata.return_value = service.ListActionsResponse(), metadata
-
-        client.list_asset_actions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_create_task_rest_bad_request(request_type=service.CreateTaskRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.CreateTaskRequest,
-    dict,
-])
-def test_create_task_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request_init["task"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'state': 1, 'labels': {}, 'trigger_spec': {'type_': 1, 'start_time': {}, 'disabled': True, 'max_retries': 1187, 'schedule': 'schedule_value'}, 'execution_spec': {'args': {}, 'service_account': 'service_account_value', 'project': 'project_value', 'max_job_execution_lifetime': {'seconds': 751, 'nanos': 543}, 'kms_key': 'kms_key_value'}, 'execution_status': {'update_time': {}, 'latest_job': {'name': 'name_value', 'uid': 'uid_value', 'start_time': {}, 'end_time': {}, 'state': 1, 'retry_count': 1214, 'service': 1, 'service_job': 'service_job_value', 'message': 'message_value', 'labels': {}, 'trigger': 1, 'execution_spec': {}}}, 'spark': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'python_script_file': 'python_script_file_value', 'sql_script_file': 'sql_script_file_value', 'sql_script': 'sql_script_value', 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'infrastructure_spec': {'batch': {'executors_count': 1642, 'max_executors_count': 2063}, 'container_image': {'image': 'image_value', 'java_jars': ['java_jars_value1', 'java_jars_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}, 'vpc_network': {'network': 'network_value', 'sub_network': 'sub_network_value', 'network_tags': ['network_tags_value1', 'network_tags_value2']}}}, 'notebook': {'notebook': 'notebook_value', 'infrastructure_spec': {}, 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = service.CreateTaskRequest.meta.fields["task"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["task"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["task"][field])):
-                    del request_init["task"][field][i][subfield]
-            else:
-                del request_init["task"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_task(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
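The field-pruning block in `test_create_task_rest_call_success` exists because the sample request dict is generated against one version of the proto definitions while the runtime dependency may be older, so nested keys unknown at runtime must be dropped before the request can be built. The detection trick it relies on can be stated in isolation; a minimal sketch, with helper names invented for illustration:

def is_proto_plus(message_type):
    # proto-plus wrapper classes expose their schema via `.meta.fields`,
    # whereas raw protobuf classes carry a `DESCRIPTOR` attribute instead.
    return not hasattr(message_type, "DESCRIPTOR")

def field_names(message_type):
    # Enumerate top-level field names regardless of which flavor we were given.
    if is_proto_plus(message_type):
        return list(message_type.meta.fields.keys())
    return [f.name for f in message_type.DESCRIPTOR.fields]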
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_task_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_task") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.CreateTaskRequest.pb(service.CreateTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.CreateTaskRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_task_rest_bad_request(request_type=service.UpdateTaskRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.UpdateTaskRequest,
-    dict,
-])
-def test_update_task_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'task': {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}}
-    request_init["task"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'description': 'description_value', 'display_name': 'display_name_value', 'state': 1, 'labels': {}, 'trigger_spec': {'type_': 1, 'start_time': {}, 'disabled': True, 'max_retries': 1187, 'schedule': 'schedule_value'}, 'execution_spec': {'args': {}, 'service_account': 'service_account_value', 'project': 'project_value', 'max_job_execution_lifetime': {'seconds': 751, 'nanos': 543}, 'kms_key': 'kms_key_value'}, 'execution_status': {'update_time': {}, 'latest_job': {'name': 'name_value', 'uid': 'uid_value', 'start_time': {}, 'end_time': {}, 'state': 1, 'retry_count': 1214, 'service': 1, 'service_job': 'service_job_value', 'message': 'message_value', 'labels': {}, 'trigger': 1, 'execution_spec': {}}}, 'spark': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'python_script_file': 'python_script_file_value', 'sql_script_file': 'sql_script_file_value', 'sql_script': 'sql_script_value', 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'infrastructure_spec': {'batch': {'executors_count': 1642, 'max_executors_count': 2063}, 'container_image': {'image': 'image_value', 'java_jars': ['java_jars_value1', 'java_jars_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}, 'vpc_network': {'network': 'network_value', 'sub_network': 'sub_network_value', 'network_tags': ['network_tags_value1', 'network_tags_value2']}}}, 'notebook': {'notebook': 'notebook_value', 'infrastructure_spec': {}, 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = service.UpdateTaskRequest.meta.fields["task"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["task"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["task"][field])):
-                    del request_init["task"][field][i][subfield]
-            else:
-                del request_init["task"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.update_task(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_task_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_task") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.UpdateTaskRequest.pb(service.UpdateTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.UpdateTaskRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_task_rest_bad_request(request_type=service.DeleteTaskRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.DeleteTaskRequest,
-    dict,
-])
-def test_delete_task_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_task(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_task_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_task") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.DeleteTaskRequest.pb(service.DeleteTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.DeleteTaskRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_tasks_rest_bad_request(request_type=service.ListTasksRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_tasks(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListTasksRequest,
-    dict,
-])
-def test_list_tasks_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListTasksResponse(
-            next_page_token='next_page_token_value',
-            unreachable_locations=['unreachable_locations_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListTasksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_tasks(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListTasksPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_locations == ['unreachable_locations_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_tasks_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_tasks") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_tasks_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_tasks") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ListTasksRequest.pb(service.ListTasksRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.ListTasksResponse.to_json(service.ListTasksResponse())
-        req.return_value.content = return_value
-
-        request = service.ListTasksRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.ListTasksResponse()
-        post_with_metadata.return_value = service.ListTasksResponse(), metadata
-
-        client.list_tasks(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
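`ListTasksPager`, asserted above, is the paging wrapper the client hands back for list calls; callers normally just iterate it and let it follow `next_page_token` lazily. A hypothetical usage sketch, with an invented resource name:

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
parent = "projects/my-project/locations/us-central1/lakes/my-lake"  # invented
for task in client.list_tasks(parent=parent):
    # The pager fetches further pages on demand while iterating.
    print(task.name, task.state)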
-
-
-def test_get_task_rest_bad_request(request_type=service.GetTaskRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetTaskRequest,
-    dict,
-])
-def test_get_task_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = tasks.Task(
-            name='name_value',
-            uid='uid_value',
-            description='description_value',
-            display_name='display_name_value',
-            state=resources.State.ACTIVE,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = tasks.Task.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_task(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, tasks.Task)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.description == 'description_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_task_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_task") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.GetTaskRequest.pb(service.GetTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = tasks.Task.to_json(tasks.Task())
-        req.return_value.content = return_value
-
-        request = service.GetTaskRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = tasks.Task()
-        post_with_metadata.return_value = tasks.Task(), metadata
-
-        client.get_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_jobs_rest_bad_request(request_type=service.ListJobsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_jobs(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListJobsRequest,
-    dict,
-])
-def test_list_jobs_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListJobsResponse(
-            next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_jobs(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_jobs_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_jobs") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_jobs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ListJobsRequest.pb(service.ListJobsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.ListJobsResponse.to_json(service.ListJobsResponse())
-        req.return_value.content = return_value
-
-        request = service.ListJobsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.ListJobsResponse()
-        post_with_metadata.return_value = service.ListJobsResponse(), metadata
-
-        client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_run_task_rest_bad_request(request_type=service.RunTaskRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.run_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.RunTaskRequest,
-    dict,
-])
-def test_run_task_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.RunTaskResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.RunTaskResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.run_task(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, service.RunTaskResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_run_task_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_run_task") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_run_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_run_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.RunTaskRequest.pb(service.RunTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.RunTaskResponse.to_json(service.RunTaskResponse())
-        req.return_value.content = return_value
-
-        request = service.RunTaskRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.RunTaskResponse()
-        post_with_metadata.return_value = service.RunTaskResponse(), metadata
-
-        client.run_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
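Every call in these tests forwards a `metadata` list of key/value tuples, and the interceptors receive the same list; end users can pass the parameter too, to attach per-call headers. A sketch with invented values (the header name is hypothetical, and `RunTaskResponse` carries the started job in its `job` field):

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
response = client.run_task(
    name="projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task",  # invented
    metadata=[("x-custom-request-reason", "manual-trigger")],  # invented header
)
print(response.job.name)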
-
-
-def test_get_job_rest_bad_request(request_type=service.GetJobRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_job(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetJobRequest,
-    dict,
-])
-def test_get_job_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = tasks.Job(
-            name='name_value',
-            uid='uid_value',
-            state=tasks.Job.State.RUNNING,
-            retry_count=1214,
-            service=tasks.Job.Service.DATAPROC,
-            service_job='service_job_value',
-            message='message_value',
-            trigger=tasks.Job.Trigger.TASK_CONFIG,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = tasks.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, tasks.Job)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.state == tasks.Job.State.RUNNING
-    assert response.retry_count == 1214
-    assert response.service == tasks.Job.Service.DATAPROC
-    assert response.service_job == 'service_job_value'
-    assert response.message == 'message_value'
-    assert response.trigger == tasks.Job.Trigger.TASK_CONFIG
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_job_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_job") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.GetJobRequest.pb(service.GetJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = tasks.Job.to_json(tasks.Job())
-        req.return_value.content = return_value
-
-        request = service.GetJobRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = tasks.Job()
-        post_with_metadata.return_value = tasks.Job(), metadata
-
-        client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_cancel_job_rest_bad_request(request_type=service.CancelJobRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.cancel_job(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.CancelJobRequest,
-    dict,
-])
-def test_cancel_job_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/tasks/sample4/jobs/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.cancel_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_cancel_job_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_cancel_job") as pre:
-        pre.assert_not_called()
-        pb_message = service.CancelJobRequest.pb(service.CancelJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = service.CancelJobRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
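As the test above shows, `cancel_job` maps to an empty response body and the client returns `None`, so observing the effect of a cancellation means re-reading the job. A hypothetical sketch (the resource name is invented, and the state seen depends on service-side timing):

from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
name = "projects/my-project/locations/us-central1/lakes/my-lake/tasks/my-task/jobs/my-job"  # invented
client.cancel_job(name=name)   # returns None on success
job = client.get_job(name=name)
print(job.state)               # e.g. CANCELLING, then CANCELLED once processed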
-
-
-def test_create_environment_rest_bad_request(request_type=service.CreateEnvironmentRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_environment(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.CreateEnvironmentRequest,
-    dict,
-])
-def test_create_environment_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request_init["environment"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'infrastructure_spec': {'compute': {'disk_size_gb': 1261, 'node_count': 1070, 'max_node_count': 1491}, 'os_image': {'image_version': 'image_version_value', 'java_libraries': ['java_libraries_value1', 'java_libraries_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}}, 'session_spec': {'max_idle_duration': {'seconds': 751, 'nanos': 543}, 'enable_fast_startup': True}, 'session_status': {'active': True}, 'endpoints': {'notebooks': 'notebooks_value', 'sql': 'sql_value'}}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = service.CreateEnvironmentRequest.meta.fields["environment"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["environment"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["environment"][field])):
-                    del request_init["environment"][field][i][subfield]
-            else:
-                del request_init["environment"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_environment(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_environment_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-    )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_environment") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_create_environment_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_create_environment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.CreateEnvironmentRequest.pb(service.CreateEnvironmentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.CreateEnvironmentRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_environment_rest_bad_request(request_type=service.UpdateEnvironmentRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-def test_update_environment_rest_bad_request(request_type=service.UpdateEnvironmentRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_environment(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.UpdateEnvironmentRequest,
-    dict,
-])
-def test_update_environment_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'environment': {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}}
-    request_init["environment"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4', 'display_name': 'display_name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'description': 'description_value', 'state': 1, 'infrastructure_spec': {'compute': {'disk_size_gb': 1261, 'node_count': 1070, 'max_node_count': 1491}, 'os_image': {'image_version': 'image_version_value', 'java_libraries': ['java_libraries_value1', 'java_libraries_value2'], 'python_packages': ['python_packages_value1', 'python_packages_value2'], 'properties': {}}}, 'session_spec': {'max_idle_duration': {'seconds': 751, 'nanos': 543}, 'enable_fast_startup': True}, 'session_status': {'active': True}, 'endpoints': {'notebooks': 'notebooks_value', 'sql': 'sql_value'}}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = service.UpdateEnvironmentRequest.meta.fields["environment"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["environment"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["environment"][field])):
-                    del request_init["environment"][field][i][subfield]
-            else:
-                del request_init["environment"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.update_environment(request)
-
-        # Establish that the response is the type that we expect.
-        json_return_value = json_format.MessageToJson(return_value)
-
-
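# Every *_rest_call_success test wraps its fake proto the same way. The
# repetition could be captured by a small helper along these lines;
# `fake_rest_response` is a hypothetical name used only for illustration:
def fake_rest_response(message, status_code=200):
    # Serialize the proto the way the server would, then hang it on a mock
    # that quacks like requests.Response for the REST transport.
    response_value = mock.Mock()
    response_value.status_code = status_code
    response_value.content = json_format.MessageToJson(message).encode('UTF-8')
    response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response_value
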
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_environment_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-        )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_environment") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_update_environment_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_update_environment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.UpdateEnvironmentRequest.pb(service.UpdateEnvironmentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.UpdateEnvironmentRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_environment_rest_bad_request(request_type=service.DeleteEnvironmentRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_environment(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.DeleteEnvironmentRequest,
-    dict,
-])
-def test_delete_environment_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_environment(request)
-
-        # Establish that the response is the type that we expect.
-        json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_environment_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-        )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_environment") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_delete_environment_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_delete_environment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.DeleteEnvironmentRequest.pb(service.DeleteEnvironmentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.DeleteEnvironmentRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
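# The *_rest_bad_request tests lean on google.api_core mapping HTTP status
# codes onto typed exceptions; the 400 -> BadRequest expectation can be
# exercised directly, which is all those tests amount to at the wire level:
_exc = core_exceptions.from_http_status(400, "bad request")
assert isinstance(_exc, core_exceptions.BadRequest)
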
-def test_list_environments_rest_bad_request(request_type=service.ListEnvironmentsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_environments(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListEnvironmentsRequest,
-    dict,
-])
-def test_list_environments_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListEnvironmentsResponse(
-              next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListEnvironmentsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_environments(request)
-
-        # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEnvironmentsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_environments_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-        )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_environments") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_environments_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_environments") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ListEnvironmentsRequest.pb(service.ListEnvironmentsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.ListEnvironmentsResponse.to_json(service.ListEnvironmentsResponse())
-        req.return_value.content = return_value
-
-        request = service.ListEnvironmentsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.ListEnvironmentsResponse()
-        post_with_metadata.return_value = service.ListEnvironmentsResponse(), metadata
-
-        client.list_environments(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_environment_rest_bad_request(request_type=service.GetEnvironmentRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_environment(request)
-
-
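# The list_* success tests assert a pager, not a bare response: the pager
# re-issues the request with each next_page_token until the token comes back
# empty. The mechanics reduce to a loop like this toy version (all names here
# are invented for illustration):
def _iterate_pages(fetch_page, token=""):
    # fetch_page(token) -> (items, next_token); stop once next_token is falsy.
    while True:
        items, token = fetch_page(token)
        yield from items
        if not token:
            return

_pages = {"": (["env-a", "env-b"], "t1"), "t1": (["env-c"], "")}
assert list(_iterate_pages(lambda t: _pages[t])) == ["env-a", "env-b", "env-c"]
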
-@pytest.mark.parametrize("request_type", [
-    service.GetEnvironmentRequest,
-    dict,
-])
-def test_get_environment_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = analyze.Environment(
-              name='name_value',
-              display_name='display_name_value',
-              uid='uid_value',
-              description='description_value',
-              state=resources.State.ACTIVE,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = analyze.Environment.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_environment(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, analyze.Environment)
-        assert response.name == 'name_value'
-        assert response.display_name == 'display_name_value'
-        assert response.uid == 'uid_value'
-        assert response.description == 'description_value'
-        assert response.state == resources.State.ACTIVE
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_environment_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-        )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_environment") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_get_environment_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_get_environment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.GetEnvironmentRequest.pb(service.GetEnvironmentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = analyze.Environment.to_json(analyze.Environment())
-        req.return_value.content = return_value
-
-        request = service.GetEnvironmentRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = analyze.Environment()
-        post_with_metadata.return_value = analyze.Environment(), metadata
-
-        client.get_environment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_sessions_rest_bad_request(request_type=service.ListSessionsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_sessions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListSessionsRequest,
-    dict,
-])
-def test_list_sessions_rest_call_success(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/environments/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListSessionsResponse(
-              next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListSessionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_sessions(request)
-
-        # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSessionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_sessions_rest_interceptors(null_interceptor):
-    transport = transports.DataplexServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataplexServiceRestInterceptor(),
-        )
-    client = DataplexServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_sessions") as post, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DataplexServiceRestInterceptor, "pre_list_sessions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ListSessionsRequest.pb(service.ListSessionsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = service.ListSessionsResponse.to_json(service.ListSessionsResponse())
-        req.return_value.content = return_value
-
-        request = service.ListSessionsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = service.ListSessionsResponse()
-        post_with_metadata.return_value = service.ListSessionsResponse(), metadata
-
-        client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_location(request)
-
-
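# The mixin tests below build their requests with json_format.ParseDict rather
# than keyword arguments, since locations_pb2/operations_pb2 are plain protobuf
# (not proto-plus) messages; the dict round-trips into the message like so:
_loc_request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, locations_pb2.GetLocationRequest())
assert _loc_request.name == 'projects/sample1/locations/sample2'
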
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.GetLocationRequest,
-    dict,
-])
-def test_get_location_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = locations_pb2.Location()
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.get_location(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, locations_pb2.Location)
-
-
-def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_locations(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.ListLocationsRequest,
-    dict,
-])
-def test_list_locations_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = locations_pb2.ListLocationsResponse()
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.list_locations(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, locations_pb2.ListLocationsResponse)
-
-
-def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.cancel_operation(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    operations_pb2.CancelOperationRequest,
-    dict,
-])
-def test_cancel_operation_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = '{}'
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.cancel_operation(request)
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_operation(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    operations_pb2.DeleteOperationRequest,
-    dict,
-])
-def test_delete_operation_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = '{}'
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.delete_operation(request)
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_operation(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    operations_pb2.GetOperationRequest,
-    dict,
-])
-def test_get_operation_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation()
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.get_operation(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, operations_pb2.Operation)
-
-
-def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_operations(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    operations_pb2.ListOperationsRequest,
-    dict,
-])
-def test_list_operations_rest(request_type):
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.ListOperationsResponse()
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.list_operations(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, operations_pb2.ListOperationsResponse)
-
-def test_initialize_client_w_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_lake_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_lake),
-            '__call__') as call:
-        client.create_lake(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CreateLakeRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_lake_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_lake),
-            '__call__') as call:
-        client.update_lake(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.UpdateLakeRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_lake_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lake),
-            '__call__') as call:
-        client.delete_lake(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.DeleteLakeRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_lakes_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lakes),
-            '__call__') as call:
-        client.list_lakes(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListLakesRequest()
-
-        assert args[0] == request_msg
-
-
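# The *_empty_call_rest tests above and below all pin down one contract:
# calling a client method with request=None still hands the transport a
# default-constructed request of the right type. The shared shape, as a
# hypothetical helper (not part of the generated suite):
def _assert_empty_call(client, method_name, expected_request):
    with mock.patch.object(type(getattr(client.transport, method_name)), '__call__') as call:
        getattr(client, method_name)(request=None)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == expected_request

# e.g. _assert_empty_call(client, "create_lake", service.CreateLakeRequest())
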
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_lake_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_lake),
-            '__call__') as call:
-        client.get_lake(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetLakeRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_lake_actions_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_lake_actions),
-            '__call__') as call:
-        client.list_lake_actions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListLakeActionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_zone_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_zone),
-            '__call__') as call:
-        client.create_zone(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CreateZoneRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_zone_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_zone),
-            '__call__') as call:
-        client.update_zone(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.UpdateZoneRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_zone_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_zone),
-            '__call__') as call:
-        client.delete_zone(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.DeleteZoneRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_zones_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_zones),
-            '__call__') as call:
-        client.list_zones(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListZonesRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_zone_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_zone),
-            '__call__') as call:
-        client.get_zone(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetZoneRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_zone_actions_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_zone_actions),
-            '__call__') as call:
-        client.list_zone_actions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListZoneActionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_asset_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_asset),
-            '__call__') as call:
-        client.create_asset(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CreateAssetRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_asset_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_asset),
-            '__call__') as call:
-        client.update_asset(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.UpdateAssetRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_asset_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_asset),
-            '__call__') as call:
-        client.delete_asset(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.DeleteAssetRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_assets_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_assets),
-            '__call__') as call:
-        client.list_assets(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListAssetsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_asset_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_asset),
-            '__call__') as call:
-        client.get_asset(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetAssetRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_asset_actions_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_asset_actions),
-            '__call__') as call:
-        client.list_asset_actions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListAssetActionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_task_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_task),
-            '__call__') as call:
-        client.create_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CreateTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_task_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_task),
-            '__call__') as call:
-        client.update_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.UpdateTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_task_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_task),
-            '__call__') as call:
-        client.delete_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.DeleteTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_tasks_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        client.list_tasks(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListTasksRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_task_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        client.get_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_jobs_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        client.list_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_run_task_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.run_task),
-            '__call__') as call:
-        client.run_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.RunTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_job_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        client.get_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_cancel_job_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        client.cancel_job(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CancelJobRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_environment_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_environment),
-            '__call__') as call:
-        client.create_environment(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.CreateEnvironmentRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_environment_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_environment),
-            '__call__') as call:
-        client.update_environment(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.UpdateEnvironmentRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_environment_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_environment),
-            '__call__') as call:
-        client.delete_environment(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.DeleteEnvironmentRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_environments_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_environments),
-            '__call__') as call:
-        client.list_environments(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListEnvironmentsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_environment_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_environment),
-            '__call__') as call:
-        client.get_environment(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.GetEnvironmentRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_sessions_empty_call_rest():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        client.list_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = service.ListSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-def test_dataplex_service_rest_lro_client():
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = DataplexServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.DataplexServiceGrpcTransport,
-    )
-
-def test_dataplex_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.DataplexServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
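# The base-transport test that follows asserts the abstract contract: every
# RPC stub on DataplexServiceTransport raises NotImplementedError until a
# concrete gRPC/REST transport overrides it. Condensed, the base class behaves
# like this sketch (an illustration, not the real module):
class _BaseTransportSketch:
    def create_lake(self, request):
        raise NotImplementedError()

    @property
    def operations_client(self):
        raise NotImplementedError()
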
- methods = ( - 'create_lake', - 'update_lake', - 'delete_lake', - 'list_lakes', - 'get_lake', - 'list_lake_actions', - 'create_zone', - 'update_zone', - 'delete_zone', - 'list_zones', - 'get_zone', - 'list_zone_actions', - 'create_asset', - 'update_asset', - 'delete_asset', - 'list_assets', - 'get_asset', - 'list_asset_actions', - 'create_task', - 'update_task', - 'delete_task', - 'list_tasks', - 'get_task', - 'list_jobs', - 'run_task', - 'get_job', - 'cancel_job', - 'create_environment', - 'update_environment', - 'delete_environment', - 'list_environments', - 'get_environment', - 'list_sessions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_dataplex_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataplexServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_dataplex_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.dataplex_service.transports.DataplexServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataplexServiceTransport() - adc.assert_called_once() - - -def test_dataplex_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataplexServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, - ], -) -def test_dataplex_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataplexServiceGrpcTransport, - transports.DataplexServiceGrpcAsyncIOTransport, - transports.DataplexServiceRestTransport, - ], -) -def test_dataplex_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataplexServiceGrpcTransport, grpc_helpers), - (transports.DataplexServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_dataplex_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
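The mTLS branch that continues below asserts that the client certificate callback's output is handed straight to grpc.ssl_channel_credentials. A minimal sketch of that hand-off, using the same placeholder PEM bytes as the fixtures and mocking the grpc call exactly as the test does:

from unittest import mock

import grpc


def client_cert_source_callback():
    # Placeholder PEM pair, matching the fixtures in the tests above.
    return b"cert bytes", b"key bytes"


with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
    cert, key = client_cert_source_callback()
    grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
    # This is the exact hand-off the mTLS test below asserts on.
    mock_ssl_cred.assert_called_once_with(
        certificate_chain=b"cert bytes", private_key=b"key bytes"
    )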
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_dataplex_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DataplexServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dataplex_service_host_no_port(transport_name): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_dataplex_service_host_with_port(transport_name): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_dataplex_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DataplexServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DataplexServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_lake._session - session2 = client2.transport.create_lake._session - assert session1 != session2 - session1 = client1.transport.update_lake._session - session2 = client2.transport.update_lake._session - assert session1 != session2 - session1 = client1.transport.delete_lake._session - session2 = client2.transport.delete_lake._session - assert session1 != session2 - session1 = client1.transport.list_lakes._session - session2 = client2.transport.list_lakes._session - assert session1 != session2 - session1 = client1.transport.get_lake._session - session2 = client2.transport.get_lake._session - assert session1 != session2 - session1 = client1.transport.list_lake_actions._session - session2 = client2.transport.list_lake_actions._session - assert session1 != session2 - session1 = client1.transport.create_zone._session - session2 = client2.transport.create_zone._session - assert session1 != session2 - session1 = client1.transport.update_zone._session - session2 = client2.transport.update_zone._session - assert session1 != session2 - session1 = client1.transport.delete_zone._session - session2 = client2.transport.delete_zone._session - assert session1 != session2 - 
session1 = client1.transport.list_zones._session - session2 = client2.transport.list_zones._session - assert session1 != session2 - session1 = client1.transport.get_zone._session - session2 = client2.transport.get_zone._session - assert session1 != session2 - session1 = client1.transport.list_zone_actions._session - session2 = client2.transport.list_zone_actions._session - assert session1 != session2 - session1 = client1.transport.create_asset._session - session2 = client2.transport.create_asset._session - assert session1 != session2 - session1 = client1.transport.update_asset._session - session2 = client2.transport.update_asset._session - assert session1 != session2 - session1 = client1.transport.delete_asset._session - session2 = client2.transport.delete_asset._session - assert session1 != session2 - session1 = client1.transport.list_assets._session - session2 = client2.transport.list_assets._session - assert session1 != session2 - session1 = client1.transport.get_asset._session - session2 = client2.transport.get_asset._session - assert session1 != session2 - session1 = client1.transport.list_asset_actions._session - session2 = client2.transport.list_asset_actions._session - assert session1 != session2 - session1 = client1.transport.create_task._session - session2 = client2.transport.create_task._session - assert session1 != session2 - session1 = client1.transport.update_task._session - session2 = client2.transport.update_task._session - assert session1 != session2 - session1 = client1.transport.delete_task._session - session2 = client2.transport.delete_task._session - assert session1 != session2 - session1 = client1.transport.list_tasks._session - session2 = client2.transport.list_tasks._session - assert session1 != session2 - session1 = client1.transport.get_task._session - session2 = client2.transport.get_task._session - assert session1 != session2 - session1 = client1.transport.list_jobs._session - session2 = client2.transport.list_jobs._session - assert session1 != session2 - session1 = client1.transport.run_task._session - session2 = client2.transport.run_task._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.cancel_job._session - session2 = client2.transport.cancel_job._session - assert session1 != session2 - session1 = client1.transport.create_environment._session - session2 = client2.transport.create_environment._session - assert session1 != session2 - session1 = client1.transport.update_environment._session - session2 = client2.transport.update_environment._session - assert session1 != session2 - session1 = client1.transport.delete_environment._session - session2 = client2.transport.delete_environment._session - assert session1 != session2 - session1 = client1.transport.list_environments._session - session2 = client2.transport.list_environments._session - assert session1 != session2 - session1 = client1.transport.get_environment._session - session2 = client2.transport.get_environment._session - assert session1 != session2 - session1 = client1.transport.list_sessions._session - session2 = client2.transport.list_sessions._session - assert session1 != session2 -def test_dataplex_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
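The channel-injection test that continues below reduces to a short sketch: a caller-supplied channel is adopted as-is, and the host is normalized with the default port. Assumes the package is installed; the endpoint and host are illustrative:

import grpc

from google.cloud.dataplex_v1.services.dataplex_service import transports

# A pre-built channel; local credentials keep this self-contained.
channel = grpc.secure_channel("localhost:8080", grpc.local_channel_credentials())

transport = transports.DataplexServiceGrpcTransport(
    host="squid.clam.whelk",
    channel=channel,
)
# The transport uses the injected channel instead of creating its own,
# and appends the default port to the bare host.
assert transport.grpc_channel is channel
assert transport._host == "squid.clam.whelk:443"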
- transport = transports.DataplexServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_dataplex_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataplexServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DataplexServiceGrpcTransport, transports.DataplexServiceGrpcAsyncIOTransport]) -def test_dataplex_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_dataplex_service_grpc_lro_client(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_dataplex_service_grpc_lro_async_client(): - client = DataplexServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_action_path(): - project = "squid" - location = "clam" - lake = "whelk" - action = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/actions/{action}".format(project=project, location=location, lake=lake, action=action, ) - actual = DataplexServiceClient.action_path(project, location, lake, action) - assert expected == actual - - -def test_parse_action_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "action": "mussel", - } - path = DataplexServiceClient.action_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataplexServiceClient.parse_action_path(path) - assert expected == actual - -def test_asset_path(): - project = "winkle" - location = "nautilus" - lake = "scallop" - zone = "abalone" - asset = "squid" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/assets/{asset}".format(project=project, location=location, lake=lake, zone=zone, asset=asset, ) - actual = DataplexServiceClient.asset_path(project, location, lake, zone, asset) - assert expected == actual - - -def test_parse_asset_path(): - expected = { - "project": "clam", - "location": "whelk", - "lake": "octopus", - "zone": "oyster", - "asset": "nudibranch", - } - path = DataplexServiceClient.asset_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_asset_path(path) - assert expected == actual - -def test_environment_path(): - project = "cuttlefish" - location = "mussel" - lake = "winkle" - environment = "nautilus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format(project=project, location=location, lake=lake, environment=environment, ) - actual = DataplexServiceClient.environment_path(project, location, lake, environment) - assert expected == actual - - -def test_parse_environment_path(): - expected = { - "project": "scallop", - "location": "abalone", - "lake": "squid", - "environment": "clam", - } - path = DataplexServiceClient.environment_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_environment_path(path) - assert expected == actual - -def test_job_path(): - project = "whelk" - location = "octopus" - lake = "oyster" - task = "nudibranch" - job = "cuttlefish" - expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format(project=project, location=location, lake=lake, task=task, job=job, ) - actual = DataplexServiceClient.job_path(project, location, lake, task, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "mussel", - "location": "winkle", - "lake": "nautilus", - "task": "scallop", - "job": "abalone", - } - path = DataplexServiceClient.job_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_job_path(path) - assert expected == actual - -def test_lake_path(): - project = "squid" - location = "clam" - lake = "whelk" - expected = "projects/{project}/locations/{location}/lakes/{lake}".format(project=project, location=location, lake=lake, ) - actual = DataplexServiceClient.lake_path(project, location, lake) - assert expected == actual - - -def test_parse_lake_path(): - expected = { - "project": "octopus", - "location": "oyster", - "lake": "nudibranch", - } - path = DataplexServiceClient.lake_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataplexServiceClient.parse_lake_path(path) - assert expected == actual - -def test_session_path(): - project = "cuttlefish" - location = "mussel" - lake = "winkle" - environment = "nautilus" - session = "scallop" - expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format(project=project, location=location, lake=lake, environment=environment, session=session, ) - actual = DataplexServiceClient.session_path(project, location, lake, environment, session) - assert expected == actual - - -def test_parse_session_path(): - expected = { - "project": "abalone", - "location": "squid", - "lake": "clam", - "environment": "whelk", - "session": "octopus", - } - path = DataplexServiceClient.session_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_session_path(path) - assert expected == actual - -def test_task_path(): - project = "oyster" - location = "nudibranch" - lake = "cuttlefish" - task = "mussel" - expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format(project=project, location=location, lake=lake, task=task, ) - actual = DataplexServiceClient.task_path(project, location, lake, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "lake": "scallop", - "task": "abalone", - } - path = DataplexServiceClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_task_path(path) - assert expected == actual - -def test_zone_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - actual = DataplexServiceClient.zone_path(project, location, lake, zone) - assert expected == actual - - -def test_parse_zone_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "zone": "mussel", - } - path = DataplexServiceClient.zone_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_zone_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataplexServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DataplexServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataplexServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DataplexServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
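All of the *_path/parse_*_path tests above and below exercise one round trip: a classmethod builds the canonical resource name, and its parse_* twin recovers the components. A minimal sketch with hypothetical IDs:

from google.cloud.dataplex_v1 import DataplexServiceClient

# Hypothetical resource IDs; any strings without '/' behave the same way.
path = DataplexServiceClient.lake_path("my-project", "us-central1", "my-lake")
assert path == "projects/my-project/locations/us-central1/lakes/my-lake"

# parse_lake_path inverts lake_path, recovering each component by name.
assert DataplexServiceClient.parse_lake_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "lake": "my-lake",
}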
- actual = DataplexServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataplexServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DataplexServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DataplexServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DataplexServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataplexServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DataplexServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataplexServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataplexServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataplexServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
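The *_from_dict variants above and below verify that a plain dict is accepted in place of a request message. A minimal sketch, with a hypothetical operation name and the stub mocked as in the tests:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1 import DataplexServiceClient

client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials())
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
    call.return_value = None
    # The dict is marshalled into operations_pb2.DeleteOperationRequest
    # before it ever reaches the (mocked) stub.
    client.delete_operation(request={"name": "operations/sample"})
call.assert_called()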
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
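The field-header tests repeated through this block all pin the same behavior: the resource name from the request is echoed back as x-goog-request-params routing metadata. A minimal sketch, with a hypothetical operation name:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1 import DataplexServiceClient
from google.longrunning import operations_pb2

client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials())
request = operations_pb2.GetOperationRequest(name="operations/sample")

with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
    call.return_value = operations_pb2.Operation()
    client.get_operation(request)

# The routing header must echo the resource name so the backend can route.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=operations/sample") in kw["metadata"]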
- call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DataplexServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DataplexServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
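The transport-close tests above (and test_client_ctx, which continues below) guarantee that using the client as a context manager closes its transport exactly once on exit. A minimal sketch of that guarantee:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1 import DataplexServiceClient

client = DataplexServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport="grpc",
)
with mock.patch.object(type(client.transport), "close") as close:
    with client:
        # Still open inside the block...
        close.assert_not_called()
    # ...and closed exactly once on exit.
    close.assert_called_once()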
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport), - (DataplexServiceAsyncClient, transports.DataplexServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py deleted file mode 100644 index e151c6f1dc97..000000000000 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ /dev/null @@ -1,9404 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceAsyncClient -from google.cloud.dataplex_v1.services.metadata_service import MetadataServiceClient -from google.cloud.dataplex_v1.services.metadata_service import pagers -from google.cloud.dataplex_v1.services.metadata_service import transports -from google.cloud.dataplex_v1.types import metadata_ -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetadataServiceClient._get_default_mtls_endpoint(None) is None - assert MetadataServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetadataServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetadataServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MetadataServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MetadataServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetadataServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MetadataServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MetadataServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MetadataServiceClient._get_client_cert_source(None, False) is None - assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MetadataServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
MetadataServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert MetadataServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetadataServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetadataServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MetadataServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - MetadataServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert MetadataServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MetadataServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MetadataServiceClient._get_universe_domain(None, None) == MetadataServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - MetadataServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
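The precedence exercised by test__get_universe_domain can be sketched in
isolation. resolve_universe_domain below is a hypothetical stand-in for the
private helper, written only to make the resolution order explicit; it is not
the generated client's implementation:

    _DEFAULT_UNIVERSE = "googleapis.com"

    def resolve_universe_domain(client_option, env_value):
        # An explicit client option wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN
        # environment value, then the googleapis.com default.
        if client_option == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_option or env_value or _DEFAULT_UNIVERSE

    assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
    assert resolve_universe_domain(None, "bar.com") == "bar.com"
    assert resolve_universe_domain(None, None) == "googleapis.com"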
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = MetadataServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = MetadataServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetadataServiceClient, "grpc"), - (MetadataServiceAsyncClient, "grpc_asyncio"), - (MetadataServiceClient, "rest"), -]) -def test_metadata_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetadataServiceGrpcTransport, "grpc"), - (transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.MetadataServiceRestTransport, "rest"), -]) -def test_metadata_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetadataServiceClient, "grpc"), - (MetadataServiceAsyncClient, "grpc_asyncio"), - (MetadataServiceClient, "rest"), -]) -def test_metadata_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' - ) - - -def test_metadata_service_client_get_transport_class(): - transport = MetadataServiceClient.get_transport_class() - available_transports = [ - transports.MetadataServiceGrpcTransport, - transports.MetadataServiceRestTransport, - ] - assert transport in available_transports - - transport = MetadataServiceClient.get_transport_class("grpc") - assert transport == transports.MetadataServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest"), -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -def test_metadata_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetadataServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
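# ("always" forces the mTLS endpoint even though no client certificate is
# configured; note that client_cert_source_for_mtls stays None below.)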
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "true"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", "false"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", "true"), - (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(MetadataServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metadata_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
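# (With neither a provided certificate nor a discoverable default one, "auto"
# falls back to the regular endpoint, as the expected host below confirms.)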
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MetadataServiceClient, MetadataServiceAsyncClient -]) -@mock.patch.object(MetadataServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetadataServiceAsyncClient)) -def test_metadata_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MetadataServiceClient, MetadataServiceAsyncClient -]) -@mock.patch.object(MetadataServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceClient)) -@mock.patch.object(MetadataServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetadataServiceAsyncClient)) -def test_metadata_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetadataServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest"), -]) -def test_metadata_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetadataServiceClient, transports.MetadataServiceRestTransport, "rest", None), -]) -def test_metadata_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_metadata_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetadataServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc", grpc_helpers), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metadata_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
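# (Mechanically: google.auth.load_credentials_from_file is patched to return
# file_creds, and the assertion below checks that file_creds, not the ADC
# default, reached create_channel; the file credentials take precedence.)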
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.CreateEntityRequest, - dict, -]) -def test_create_entity(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - response = client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.CreateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -def test_create_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
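# (Per AIP-4235, request fields annotated for auto-population are filled in
# with a UUID4 when left unset, which is why only the non-UUID string fields
# are set explicitly here.)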
- request = metadata_.CreateEntityRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.CreateEntityRequest( - parent='parent_value', - ) - -def test_create_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc - request = {} - client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_entity in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_entity] = mock_rpc - - request = {} - await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreateEntityRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. 
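# The async transport awaits the stub, so the mocked call has to return an
# awaitable; FakeUnaryUnaryCall wraps the Entity so that awaiting the call
# yields the message itself.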
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - response = await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.CreateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_create_entity_async_from_dict(): - await test_create_entity_async(request_type=dict) - -def test_create_entity_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreateEntityRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entity_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metadata_.CreateEntityRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - await client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
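# (Routing headers travel in the call's metadata kwarg as (key, value)
# tuples; the x-goog-request-params value is derived from the request's
# parent field.)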
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entity_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entity( - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entity - mock_val = metadata_.Entity(name='name_value') - assert arg == mock_val - - -def test_create_entity_flattened_error(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entity( - metadata_.CreateEntityRequest(), - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_entity_flattened_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entity( - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entity - mock_val = metadata_.Entity(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entity_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_entity( - metadata_.CreateEntityRequest(), - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - metadata_.UpdateEntityRequest, - dict, -]) -def test_update_entity(request_type, transport: str = 'grpc'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - response = client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metadata_.UpdateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -def test_update_entity_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metadata_.UpdateEntityRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_entity(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.UpdateEntityRequest( - ) - -def test_update_entity_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc - request = {} - client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. 
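# (call_count reaching 2 below while wrapper_fn stays untouched shows that
# the second invocation reused the cached wrapped RPC instead of re-wrapping
# the method.)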
- assert mock_rpc.call_count == 1 - - client.update_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_entity in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_entity] = mock_rpc - - request = {} - await client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.UpdateEntityRequest): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - response = await client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metadata_.UpdateEntityRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, metadata_.Entity)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.id == 'id_value'
-    assert response.etag == 'etag_value'
-    assert response.type_ == metadata_.Entity.Type.TABLE
-    assert response.asset == 'asset_value'
-    assert response.data_path == 'data_path_value'
-    assert response.data_path_pattern == 'data_path_pattern_value'
-    assert response.catalog_entry == 'catalog_entry_value'
-    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
-    assert response.uid == 'uid_value'
-
-
-@pytest.mark.asyncio
-async def test_update_entity_async_from_dict():
-    await test_update_entity_async(request_type=dict)
-
-def test_update_entity_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.UpdateEntityRequest()
-
-    request.entity.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entity),
-            '__call__') as call:
-        call.return_value = metadata_.Entity()
-        client.update_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'entity.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_entity_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.UpdateEntityRequest()
-
-    request.entity.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_entity),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
-        await client.update_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'entity.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.DeleteEntityRequest,
-    dict,
-])
-def test_delete_entity(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.DeleteEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_entity_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metadata_.DeleteEntityRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_entity(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.DeleteEntityRequest(
-            name='name_value',
-            etag='etag_value',
-        )
-
-def test_delete_entity_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_entity in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc
-        request = {}
-        client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_entity in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_entity] = mock_rpc
-
-        request = {}
-        await client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeleteEntityRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.DeleteEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_entity_async_from_dict():
-    await test_delete_entity_async(request_type=dict)
-
-def test_delete_entity_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.DeleteEntityRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_entity_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.DeleteEntityRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_entity_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_entity_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_entity(
-            metadata_.DeleteEntityRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_entity_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_entity_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_entity(
-            metadata_.DeleteEntityRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.GetEntityRequest,
-    dict,
-])
-def test_get_entity(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Entity(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            id='id_value',
-            etag='etag_value',
-            type_=metadata_.Entity.Type.TABLE,
-            asset='asset_value',
-            data_path='data_path_value',
-            data_path_pattern='data_path_pattern_value',
-            catalog_entry='catalog_entry_value',
-            system=metadata_.StorageSystem.CLOUD_STORAGE,
-            uid='uid_value',
-        )
-        response = client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metadata_.Entity)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.id == 'id_value'
-    assert response.etag == 'etag_value'
-    assert response.type_ == metadata_.Entity.Type.TABLE
-    assert response.asset == 'asset_value'
-    assert response.data_path == 'data_path_value'
-    assert response.data_path_pattern == 'data_path_pattern_value'
-    assert response.catalog_entry == 'catalog_entry_value'
-    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
-    assert response.uid == 'uid_value'
-
-
-def test_get_entity_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metadata_.GetEntityRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_entity(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.GetEntityRequest(
-            name='name_value',
-        )
-
-def test_get_entity_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_entity in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc
-        request = {}
-        client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entity_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_entity in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_entity] = mock_rpc
-
-        request = {}
-        await client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_entity(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_entity_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetEntityRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            id='id_value',
-            etag='etag_value',
-            type_=metadata_.Entity.Type.TABLE,
-            asset='asset_value',
-            data_path='data_path_value',
-            data_path_pattern='data_path_pattern_value',
-            catalog_entry='catalog_entry_value',
-            system=metadata_.StorageSystem.CLOUD_STORAGE,
-            uid='uid_value',
-        ))
-        response = await client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetEntityRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metadata_.Entity)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.id == 'id_value'
-    assert response.etag == 'etag_value'
-    assert response.type_ == metadata_.Entity.Type.TABLE
-    assert response.asset == 'asset_value'
-    assert response.data_path == 'data_path_value'
-    assert response.data_path_pattern == 'data_path_pattern_value'
-    assert response.catalog_entry == 'catalog_entry_value'
-    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
-    assert response.uid == 'uid_value'
-
-
-@pytest.mark.asyncio
-async def test_get_entity_async_from_dict():
-    await test_get_entity_async(request_type=dict)
-
-def test_get_entity_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.GetEntityRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        call.return_value = metadata_.Entity()
-        client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_entity_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.GetEntityRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
-        await client.get_entity(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_entity_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Entity()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_entity_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_entity(
-            metadata_.GetEntityRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_entity_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_entity),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_entity(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entity_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_entity(
-            metadata_.GetEntityRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.ListEntitiesRequest,
-    dict,
-])
-def test_list_entities(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListEntitiesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListEntitiesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntitiesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_entities_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metadata_.ListEntitiesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_entities(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.ListEntitiesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-        )
-
-def test_list_entities_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_entities in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc
-        request = {}
-        client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_entities(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entities_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_entities in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_entities] = mock_rpc
-
-        request = {}
-        await client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_entities(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_entities_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListEntitiesRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListEntitiesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListEntitiesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_entities_async_from_dict():
-    await test_list_entities_async(request_type=dict)
-
-def test_list_entities_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListEntitiesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        call.return_value = metadata_.ListEntitiesResponse()
-        client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_entities_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListEntitiesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
-        await client.list_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_entities_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListEntitiesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_entities(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_entities_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_entities(
-            metadata_.ListEntitiesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_entities_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_entities(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_entities_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_entities(
-            metadata_.ListEntitiesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_entities_pager(transport_name: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-                next_page_token='abc',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[],
-                next_page_token='def',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                ],
-                next_page_token='ghi',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_entities(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metadata_.Entity)
-                   for i in results)
-def test_list_entities_pages(transport_name: str = "grpc"):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-                next_page_token='abc',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[],
-                next_page_token='def',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                ],
-                next_page_token='ghi',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_entities(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_entities_async_pager():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-                next_page_token='abc',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[],
-                next_page_token='def',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                ],
-                next_page_token='ghi',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_entities(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, metadata_.Entity)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_entities_async_pages():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_entities),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-                next_page_token='abc',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[],
-                next_page_token='def',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                ],
-                next_page_token='ghi',
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[
-                    metadata_.Entity(),
-                    metadata_.Entity(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_entities(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.CreatePartitionRequest,
-    dict,
-])
-def test_create_partition(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Partition(
-            name='name_value',
-            values=['values_value'],
-            location='location_value',
-            etag='etag_value',
-        )
-        response = client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.CreatePartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metadata_.Partition)
-    assert response.name == 'name_value'
-    assert response.values == ['values_value']
-    assert response.location == 'location_value'
-    assert response.etag == 'etag_value'
-
-
-def test_create_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metadata_.CreatePartitionRequest(
-        parent='parent_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.create_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.CreatePartitionRequest(
-            parent='parent_value',
-        )
-
-def test_create_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc
-        request = {}
-        client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_partition] = mock_rpc
-
-        request = {}
-        await client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.CreatePartitionRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
-            name='name_value',
-            values=['values_value'],
-            location='location_value',
-            etag='etag_value',
-        ))
-        response = await client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.CreatePartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metadata_.Partition)
-    assert response.name == 'name_value'
-    assert response.values == ['values_value']
-    assert response.location == 'location_value'
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_create_partition_async_from_dict():
-    await test_create_partition_async(request_type=dict)
-
-def test_create_partition_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.CreatePartitionRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        call.return_value = metadata_.Partition()
-        client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_partition_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.CreatePartitionRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
-        await client.create_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_partition_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Partition()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_partition(
-            parent='parent_value',
-            partition=metadata_.Partition(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].partition
-        mock_val = metadata_.Partition(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_partition_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_partition(
-            metadata_.CreatePartitionRequest(),
-            parent='parent_value',
-            partition=metadata_.Partition(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_partition_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_partition(
-            parent='parent_value',
-            partition=metadata_.Partition(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].partition
-        mock_val = metadata_.Partition(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_partition_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_partition(
-            metadata_.CreatePartitionRequest(),
-            parent='parent_value',
-            partition=metadata_.Partition(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    metadata_.DeletePartitionRequest,
-    dict,
-])
-def test_delete_partition(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.DeletePartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metadata_.DeletePartitionRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.DeletePartitionRequest(
-            name='name_value',
-            etag='etag_value',
-        )
-
-def test_delete_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc
-        request = {}
-        client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_partition] = mock_rpc
-
-        request = {}
-        await client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.DeletePartitionRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.DeletePartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_partition_async_from_dict():
-    await test_delete_partition_async(request_type=dict)
-
-def test_delete_partition_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.DeletePartitionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_partition_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.DeletePartitionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_partition_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_partition_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_partition(
-            metadata_.DeletePartitionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_partition_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_partition_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_partition(
-            metadata_.DeletePartitionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  metadata_.GetPartitionRequest,
-  dict,
-])
-def test_get_partition(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Partition(
-            name='name_value',
-            values=['values_value'],
-            location='location_value',
-            etag='etag_value',
-        )
-        response = client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetPartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
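-    # (Each field set on the stub's return value above should surface
-    # unchanged on the client response, confirming the message is handed
-    # back to the caller intact.)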
-    assert isinstance(response, metadata_.Partition)
-    assert response.name == 'name_value'
-    assert response.values == ['values_value']
-    assert response.location == 'location_value'
-    assert response.etag == 'etag_value'
-
-
-def test_get_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = metadata_.GetPartitionRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.GetPartitionRequest(
-            name='name_value',
-        )
-
-def test_get_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc
-        request = {}
-        client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_partition] = mock_rpc
-
-        request = {}
-        await client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_partition_async(transport: str = 'grpc_asyncio', request_type=metadata_.GetPartitionRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition(
-            name='name_value',
-            values=['values_value'],
-            location='location_value',
-            etag='etag_value',
-        ))
-        response = await client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.GetPartitionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metadata_.Partition)
-    assert response.name == 'name_value'
-    assert response.values == ['values_value']
-    assert response.location == 'location_value'
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_partition_async_from_dict():
-    await test_get_partition_async(request_type=dict)
-
-def test_get_partition_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.GetPartitionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        call.return_value = metadata_.Partition()
-        client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_partition_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.GetPartitionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
-        await client.get_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
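-        # Routing headers travel as `x-goog-request-params` request metadata
-        # so the backend can route on the resource name; the header tuple
-        # below should appear in the metadata passed to the stub.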
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_partition_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.Partition()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_partition_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_partition(
-            metadata_.GetPartitionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_partition_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_partition_flattened_error_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_partition(
-            metadata_.GetPartitionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  metadata_.ListPartitionsRequest,
-  dict,
-])
-def test_list_partitions(request_type, transport: str = 'grpc'):
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListPartitionsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListPartitionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListPartitionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_partitions_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = metadata_.ListPartitionsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_partitions(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metadata_.ListPartitionsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-        )
-
-def test_list_partitions_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetadataServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_partitions in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc
-        request = {}
-        client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_partitions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetadataServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_partitions in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_partitions] = mock_rpc
-
-        request = {}
-        await client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_partitions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_partitions_async(transport: str = 'grpc_asyncio', request_type=metadata_.ListPartitionsRequest):
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = metadata_.ListPartitionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListPartitionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_partitions_async_from_dict():
-    await test_list_partitions_async(request_type=dict)
-
-def test_list_partitions_field_headers():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListPartitionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        call.return_value = metadata_.ListPartitionsResponse()
-        client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_partitions_field_headers_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = metadata_.ListPartitionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
-        await client.list_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_partitions_flattened():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metadata_.ListPartitionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_partitions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_partitions_flattened_error():
-    client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_partitions(
-            metadata_.ListPartitionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_partitions_flattened_async():
-    client = MetadataServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_partitions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_partitions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_partitions_flattened_error_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_partitions( - metadata_.ListPartitionsRequest(), - parent='parent_value', - ) - - -def test_list_partitions_pager(transport_name: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_partitions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metadata_.Partition) - for i in results) -def test_list_partitions_pages(transport_name: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - pages = list(client.list_partitions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_partitions_async_pager(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
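-        # `side_effect` feeds one prepared response per underlying RPC
-        # invocation; the trailing RuntimeError is a guard that should fail
-        # the test if the pager issues more requests than there are pages.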
- call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_partitions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metadata_.Partition) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_partitions_async_pages(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_partitions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_entity_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_entity] = mock_rpc - - request = {} - client.create_entity(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_entity_rest_required_fields(request_type=metadata_.CreateEntityRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entity._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_entity._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_entity(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_entity_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_entity._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "entity", ))) - - -def test_create_entity_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
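-    # For REST transports the flattened-call check patches the underlying
-    # requests Session and validates the final URI against the http rule's
-    # path template, rather than inspecting a gRPC request object.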
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_entity(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities" % client.transport._host, args[1]) - - -def test_create_entity_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entity( - metadata_.CreateEntityRequest(), - parent='parent_value', - entity=metadata_.Entity(name='name_value'), - ) - - -def test_update_entity_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_entity] = mock_rpc - - request = {} - client.update_entity(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_entity_rest_required_fields(request_type=metadata_.UpdateEntityRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entity._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_entity._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
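-            # Returning a canned transcode result below keeps this test
-            # independent of the method's real http_options, which would
-            # otherwise reject the placeholder values used for the
-            # required fields.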
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_entity(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_entity_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_entity._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", )) & set(("entity", ))) - - -def test_delete_entity_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_entity] = mock_rpc - - request = {} - client.delete_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_entity_rest_required_fields(request_type=metadata_.DeleteEntityRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["etag"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "etag" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entity._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "etag" in jsonified_request - assert jsonified_request["etag"] == request_init["etag"] - - jsonified_request["name"] = 'name_value' - jsonified_request["etag"] = 'etag_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_entity._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
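-    # Only declared query parameters (here `etag`) may remain unset, so
-    # subtracting that set from the unset fields must leave nothing behind.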
- assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "etag" in jsonified_request - assert jsonified_request["etag"] == 'etag_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_entity(request) - - expected_params = [ - ( - "etag", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_entity_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_entity._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", "etag", ))) - - -def test_delete_entity_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_entity(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}" % client.transport._host, args[1]) - - -def test_delete_entity_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entity( - metadata_.DeleteEntityRequest(), - name='name_value', - ) - - -def test_get_entity_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_entity in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_entity] = mock_rpc - - request = {} - client.get_entity(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_entity(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_entity_rest_required_fields(request_type=metadata_.GetEntityRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entity._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_entity._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_entity(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_entity_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_entity._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view", )) & set(("name", ))) - - -def test_get_entity_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_entity(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*}" % client.transport._host, args[1]) - - -def test_get_entity_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_entity( - metadata_.GetEntityRequest(), - name='name_value', - ) - - -def test_list_entities_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_entities in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entities] = mock_rpc - - request = {} - client.list_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_entities(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_entities_rest_required_fields(request_type=metadata_.ListEntitiesRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entities._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.ListEntitiesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.ListEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_entities(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_entities_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_entities._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "view", )) & set(("parent", "view", ))) - - -def test_list_entities_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.ListEntitiesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.ListEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_entities(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*}/entities" % client.transport._host, args[1]) - - -def test_list_entities_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entities( - metadata_.ListEntitiesRequest(), - parent='parent_value', - ) - - -def test_list_entities_rest_pager(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
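-        # The REST pager is exercised end to end below: each prepared
-        # response is serialized to JSON and queued on the session mock, so
-        # every page fetch should consume one fake HTTP response.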
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - metadata_.Entity(), - ], - next_page_token='abc', - ), - metadata_.ListEntitiesResponse( - entities=[], - next_page_token='def', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - ], - next_page_token='ghi', - ), - metadata_.ListEntitiesResponse( - entities=[ - metadata_.Entity(), - metadata_.Entity(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metadata_.ListEntitiesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - - pager = client.list_entities(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metadata_.Entity) - for i in results) - - pages = list(client.list_entities(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_partition] = mock_rpc - - request = {} - client.create_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_partition_rest_required_fields(request_type=metadata_.CreatePartitionRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_partition._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.Partition() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_partition_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", )) & set(("parent", "partition", ))) - - -def test_create_partition_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Partition() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions" % client.transport._host, args[1]) - - -def test_create_partition_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
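- # (The two call styles are mutually exclusive: with both present it would
- # be unclear whether the request object or the flattened values win, so
- # the client raises ValueError rather than merging them.)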
- with pytest.raises(ValueError): - client.create_partition( - metadata_.CreatePartitionRequest(), - parent='parent_value', - partition=metadata_.Partition(name='name_value'), - ) - - -def test_delete_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_partition] = mock_rpc - - request = {} - client.delete_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_partition_rest_required_fields(request_type=metadata_.DeletePartitionRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_partition._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
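- # Unlike the create_partition variant above, the fake transcode result
- # built below carries no 'body' key: a DELETE maps every request field
- # onto the uri or the query string.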
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_partition_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_partition_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}" % client.transport._host, args[1]) - - -def test_delete_partition_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_partition( - metadata_.DeletePartitionRequest(), - name='name_value', - ) - - -def test_get_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
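- # The client resolves each RPC through the transport's _wrapped_methods
- # cache at call time, so replacing the cached entry on the next line is
- # enough to intercept every later invocation without any re-wrapping.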
- client._transport._wrapped_methods[client._transport.get_partition] = mock_rpc - - request = {} - client.get_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_partition_rest_required_fields(request_type=metadata_.GetPartitionRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.Partition() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_partition_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_partition_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
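- # The REST transport sends every request through its requests Session, so
- # patching the session class's request() intercepts the wire call and
- # lets the test hand back a canned Response.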
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Partition() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/lakes/*/zones/*/entities/*/partitions/**}" % client.transport._host, args[1]) - - -def test_get_partition_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_partition( - metadata_.GetPartitionRequest(), - name='name_value', - ) - - -def test_list_partitions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_partitions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_partitions] = mock_rpc - - request = {} - client.list_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_partitions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_partitions_rest_required_fields(request_type=metadata_.ListPartitionsRequest): - transport_class = transports.MetadataServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_partitions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_partitions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metadata_.ListPartitionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.ListPartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_partitions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_partitions_rest_unset_required_fields(): - transport = transports.MetadataServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_partitions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_partitions_rest_flattened(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.ListPartitionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metadata_.ListPartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_partitions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/lakes/*/zones/*/entities/*}/partitions" % client.transport._host, args[1]) - - -def test_list_partitions_rest_flattened_error(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_partitions( - metadata_.ListPartitionsRequest(), - parent='parent_value', - ) - - -def test_list_partitions_rest_pager(transport: str = 'rest'): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - metadata_.Partition(), - ], - next_page_token='abc', - ), - metadata_.ListPartitionsResponse( - partitions=[], - next_page_token='def', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - ], - next_page_token='ghi', - ), - metadata_.ListPartitionsResponse( - partitions=[ - metadata_.Partition(), - metadata_.Partition(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metadata_.ListPartitionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - - pager = client.list_partitions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metadata_.Partition) - for i in results) - - pages = list(client.list_partitions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetadataServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
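- # In that case the client adopts the transport as-is; credentials must
- # already live on the transport, since combining an explicit transport
- # with credentials, a credentials file, scopes, or an api_key is rejected
- # with ValueError in test_credentials_transport_error above.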
- transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetadataServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetadataServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetadataServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetadataServiceGrpcTransport, - transports.MetadataServiceGrpcAsyncIOTransport, - transports.MetadataServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MetadataServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.create_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.update_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.UpdateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - call.return_value = None - client.delete_entity(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeleteEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entity_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - call.return_value = metadata_.Entity() - client.get_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entities_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - call.return_value = metadata_.ListEntitiesResponse() - client.list_entities(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListEntitiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.create_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreatePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - call.return_value = None - client.delete_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeletePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_partition_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
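- # With request=None the client constructs a default GetPartitionRequest,
- # so the stub should see an empty request message; the args[0] assertion
- # below verifies exactly that.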
- with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - call.return_value = metadata_.Partition() - client.get_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetPartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_partitions_empty_call_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - call.return_value = metadata_.ListPartitionsResponse() - client.list_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListPartitionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MetadataServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.create_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.update_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.UpdateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeleteEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entity_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - )) - await client.get_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entities_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListEntitiesResponse( - next_page_token='next_page_token_value', - )) - await client.list_entities(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListEntitiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - )) - await client.create_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreatePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeletePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_partition_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - )) - await client.get_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetPartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_partitions_empty_call_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. 
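- # FakeUnaryUnaryCall wraps the response message in an awaitable that
- # behaves like a real async unary-unary call, which is what the async
- # client needs to await below.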
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.ListPartitionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListPartitionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = MetadataServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_entity_rest_bad_request(request_type=metadata_.CreateEntityRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entity(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.CreateEntityRequest, - dict, -]) -def test_create_entity_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request_init["entity"] = {'name': 'name_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'id': 'id_value', 'etag': 'etag_value', 'type_': 1, 'asset': 'asset_value', 'data_path': 'data_path_value', 'data_path_pattern': 'data_path_pattern_value', 'catalog_entry': 'catalog_entry_value', 'system': 1, 'format_': {'format_': 1, 'compression_format': 2, 'mime_type': 'mime_type_value', 'csv': {'encoding': 'encoding_value', 'header_rows': 1171, 'delimiter': 'delimiter_value', 'quote': 'quote_value'}, 'json': {'encoding': 'encoding_value'}, 'iceberg': {'metadata_location': 'metadata_location_value'}}, 'compatibility': {'hive_metastore': {'compatible': True, 'reason': 'reason_value'}, 'bigquery': {}}, 'access': {'read': 1}, 'uid': 'uid_value', 'schema': {'user_managed': True, 'fields': [{'name': 'name_value', 'description': 'description_value', 'type_': 1, 'mode': 1, 'fields': {}}], 'partition_fields': [{'name': 'name_value', 'type_': 1}], 'partition_style': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metadata_.CreateEntityRequest.meta.fields["entity"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. 
- # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entity"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entity"][field])): - del request_init["entity"][field][i][subfield] - else: - del request_init["entity"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entity(request) - - # Establish that the response is the type that we expect. 
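- # Every scalar field is checked so the JSON payload demonstrably
- # round-trips back into a fully populated Entity.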
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entity_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_entity") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_entity_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_create_entity") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.CreateEntityRequest.pb(metadata_.CreateEntityRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.Entity.to_json(metadata_.Entity()) - req.return_value.content = return_value - - request = metadata_.CreateEntityRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.Entity() - post_with_metadata.return_value = metadata_.Entity(), metadata - - client.create_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_entity_rest_bad_request(request_type=metadata_.UpdateEntityRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'entity': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
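- # api_core maps HTTP error codes onto typed exceptions (400 becomes
- # google.api_core.exceptions.BadRequest, 404 NotFound, and so on), which
- # is what the pytest.raises clause below expects.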
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entity(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.UpdateEntityRequest, - dict, -]) -def test_update_entity_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'entity': {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'}} - request_init["entity"] = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'id': 'id_value', 'etag': 'etag_value', 'type_': 1, 'asset': 'asset_value', 'data_path': 'data_path_value', 'data_path_pattern': 'data_path_pattern_value', 'catalog_entry': 'catalog_entry_value', 'system': 1, 'format_': {'format_': 1, 'compression_format': 2, 'mime_type': 'mime_type_value', 'csv': {'encoding': 'encoding_value', 'header_rows': 1171, 'delimiter': 'delimiter_value', 'quote': 'quote_value'}, 'json': {'encoding': 'encoding_value'}, 'iceberg': {'metadata_location': 'metadata_location_value'}}, 'compatibility': {'hive_metastore': {'compatible': True, 'reason': 'reason_value'}, 'bigquery': {}}, 'access': {'read': 1}, 'uid': 'uid_value', 'schema': {'user_managed': True, 'fields': [{'name': 'name_value', 'description': 'description_value', 'type_': 1, 'mode': 1, 'fields': {}}], 'partition_fields': [{'name': 'name_value', 'type_': 1}], 'partition_style': 1}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metadata_.UpdateEntityRequest.meta.fields["entity"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entity"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entity"][field])): - del request_init["entity"][field][i][subfield] - else: - del request_init["entity"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entity(request) - - # Establish that the response is the type that we expect. 
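- # (The transport decoded the mocked JSON payload back into a proto-plus Entity,
- # so the field-by-field asserts below exercise the full serialize/deserialize
- # round trip, including the enum-valued type_ and system fields.)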
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entity_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_update_entity") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_update_entity_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_update_entity") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.UpdateEntityRequest.pb(metadata_.UpdateEntityRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.Entity.to_json(metadata_.Entity()) - req.return_value.content = return_value - - request = metadata_.UpdateEntityRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.Entity() - post_with_metadata.return_value = metadata_.Entity(), metadata - - client.update_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entity_rest_bad_request(request_type=metadata_.DeleteEntityRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entity(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.DeleteEntityRequest, - dict, -]) -def test_delete_entity_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entity(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entity_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_delete_entity") as pre: - pre.assert_not_called() - pb_message = metadata_.DeleteEntityRequest.pb(metadata_.DeleteEntityRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = metadata_.DeleteEntityRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_entity_rest_bad_request(request_type=metadata_.GetEntityRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entity(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.GetEntityRequest, - dict, -]) -def test_get_entity_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Entity( - name='name_value', - display_name='display_name_value', - description='description_value', - id='id_value', - etag='etag_value', - type_=metadata_.Entity.Type.TABLE, - asset='asset_value', - data_path='data_path_value', - data_path_pattern='data_path_pattern_value', - catalog_entry='catalog_entry_value', - system=metadata_.StorageSystem.CLOUD_STORAGE, - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Entity.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entity(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metadata_.Entity) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.id == 'id_value' - assert response.etag == 'etag_value' - assert response.type_ == metadata_.Entity.Type.TABLE - assert response.asset == 'asset_value' - assert response.data_path == 'data_path_value' - assert response.data_path_pattern == 'data_path_pattern_value' - assert response.catalog_entry == 'catalog_entry_value' - assert response.system == metadata_.StorageSystem.CLOUD_STORAGE - assert response.uid == 'uid_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entity_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_entity") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_entity_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_get_entity") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.GetEntityRequest.pb(metadata_.GetEntityRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.Entity.to_json(metadata_.Entity()) - req.return_value.content = return_value - - request = metadata_.GetEntityRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.Entity() - post_with_metadata.return_value = metadata_.Entity(), metadata - - client.get_entity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_entities_rest_bad_request(request_type=metadata_.ListEntitiesRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entities(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.ListEntitiesRequest, - dict, -]) -def test_list_entities_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.ListEntitiesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.ListEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entities(request) - - # Establish that the response is the type that we expect. 
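- # (List RPCs are surfaced through a pager: the raw ListEntitiesResponse is wrapped
- # in pagers.ListEntitiesPager, which re-issues the request with next_page_token to
- # fetch subsequent pages. A minimal usage sketch, assuming a configured client:
- #
- #     for entity in client.list_entities(request):
- #         print(entity.name)  # iteration transparently pages through all results
- # )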
- assert isinstance(response, pagers.ListEntitiesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entities_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_entities") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_entities_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_list_entities") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.ListEntitiesRequest.pb(metadata_.ListEntitiesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.ListEntitiesResponse.to_json(metadata_.ListEntitiesResponse()) - req.return_value.content = return_value - - request = metadata_.ListEntitiesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.ListEntitiesResponse() - post_with_metadata.return_value = metadata_.ListEntitiesResponse(), metadata - - client.list_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_partition_rest_bad_request(request_type=metadata_.CreatePartitionRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_partition(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.CreatePartitionRequest, - dict, -]) -def test_create_partition_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request_init["partition"] = {'name': 'name_value', 'values': ['values_value1', 'values_value2'], 'location': 'location_value', 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metadata_.CreatePartitionRequest.meta.fields["partition"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["partition"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["partition"][field])): - del request_init["partition"][field][i][subfield] - else: - del request_init["partition"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_partition(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_partition_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_partition") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_create_partition_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_create_partition") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.CreatePartitionRequest.pb(metadata_.CreatePartitionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.Partition.to_json(metadata_.Partition()) - req.return_value.content = return_value - - request = metadata_.CreatePartitionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.Partition() - post_with_metadata.return_value = metadata_.Partition(), metadata - - client.create_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_partition_rest_bad_request(request_type=metadata_.DeletePartitionRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 
'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_partition(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.DeletePartitionRequest, - dict, -]) -def test_delete_partition_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_partition(request) - - # Establish that the response is the type that we expect. 
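- # (Delete RPCs return google.protobuf.Empty on the wire; the generated client
- # surfaces that as None, which is what the assertion below checks.)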
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_partition_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_delete_partition") as pre: - pre.assert_not_called() - pb_message = metadata_.DeletePartitionRequest.pb(metadata_.DeletePartitionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = metadata_.DeletePartitionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_partition_rest_bad_request(request_type=metadata_.GetPartitionRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_partition(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.GetPartitionRequest, - dict, -]) -def test_get_partition_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5/partitions/sample6'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = metadata_.Partition( - name='name_value', - values=['values_value'], - location='location_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.Partition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_partition(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metadata_.Partition) - assert response.name == 'name_value' - assert response.values == ['values_value'] - assert response.location == 'location_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_partition_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_partition") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_get_partition_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_get_partition") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.GetPartitionRequest.pb(metadata_.GetPartitionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.Partition.to_json(metadata_.Partition()) - req.return_value.content = return_value - - request = metadata_.GetPartitionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.Partition() - post_with_metadata.return_value = metadata_.Partition(), metadata - - client.get_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_partitions_rest_bad_request(request_type=metadata_.ListPartitionsRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_partitions(request) - - -@pytest.mark.parametrize("request_type", [ - metadata_.ListPartitionsRequest, - dict, -]) -def test_list_partitions_rest_call_success(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/lakes/sample3/zones/sample4/entities/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metadata_.ListPartitionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metadata_.ListPartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_partitions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPartitionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_partitions_rest_interceptors(null_interceptor): - transport = transports.MetadataServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetadataServiceRestInterceptor(), - ) - client = MetadataServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_partitions") as post, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "post_list_partitions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetadataServiceRestInterceptor, "pre_list_partitions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metadata_.ListPartitionsRequest.pb(metadata_.ListPartitionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metadata_.ListPartitionsResponse.to_json(metadata_.ListPartitionsResponse()) - req.return_value.content = return_value - - request = metadata_.ListPartitionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metadata_.ListPartitionsResponse() - post_with_metadata.return_value = metadata_.ListPartitionsResponse(), metadata - - client.list_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
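- # (The location and operation mixin methods use the raw protobuf classes from
- # locations_pb2/operations_pb2 rather than proto-plus wrappers, so the mocked
- # response is serialized directly with json_format.MessageToJson below.)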
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entity_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entity), - '__call__') as call: - client.create_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entity_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entity), - '__call__') as call: - client.update_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.UpdateEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entity_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entity), - '__call__') as call: - client.delete_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeleteEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entity_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_entity), - '__call__') as call: - client.get_entity(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetEntityRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_entities_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entities), - '__call__') as call: - client.list_entities(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListEntitiesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_partition_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_partition), - '__call__') as call: - client.create_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.CreatePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_partition_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_partition), - '__call__') as call: - client.delete_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.DeletePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_partition_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partition), - '__call__') as call: - client.get_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.GetPartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_partitions_empty_call_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_partitions), - '__call__') as call: - client.list_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metadata_.ListPartitionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
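- # (When no transport argument is supplied, the client factory resolves to the
- # synchronous gRPC transport; passing transport="rest" or transport="grpc_asyncio"
- # overrides that default, as exercised throughout the tests above.)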
- client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetadataServiceGrpcTransport, - ) - -def test_metadata_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetadataServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metadata_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetadataServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_entity', - 'update_entity', - 'delete_entity', - 'get_entity', - 'list_entities', - 'create_partition', - 'delete_partition', - 'get_partition', - 'list_partitions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_metadata_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetadataServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_metadata_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetadataServiceTransport() - adc.assert_called_once() - - -def test_metadata_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
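- # (google.auth.default() is patched so the test can assert the exact scopes and
- # quota_project_id requested from Application Default Credentials without touching
- # a real credential source.)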
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetadataServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetadataServiceGrpcTransport, - transports.MetadataServiceGrpcAsyncIOTransport, - ], -) -def test_metadata_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetadataServiceGrpcTransport, - transports.MetadataServiceGrpcAsyncIOTransport, - transports.MetadataServiceRestTransport, - ], -) -def test_metadata_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetadataServiceGrpcTransport, grpc_helpers), - (transports.MetadataServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metadata_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataplex.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataplex.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) -def test_metadata_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_metadata_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MetadataServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metadata_service_host_no_port(transport_name): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metadata_service_host_with_port(transport_name): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_metadata_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MetadataServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = MetadataServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_entity._session - session2 = client2.transport.create_entity._session - assert session1 != session2 - session1 = client1.transport.update_entity._session - session2 = client2.transport.update_entity._session - assert session1 != session2 - session1 = client1.transport.delete_entity._session - session2 = client2.transport.delete_entity._session - assert session1 != session2 - session1 = client1.transport.get_entity._session - session2 = client2.transport.get_entity._session - assert session1 != session2 - session1 = 
client1.transport.list_entities._session - session2 = client2.transport.list_entities._session - assert session1 != session2 - session1 = client1.transport.create_partition._session - session2 = client2.transport.create_partition._session - assert session1 != session2 - session1 = client1.transport.delete_partition._session - session2 = client2.transport.delete_partition._session - assert session1 != session2 - session1 = client1.transport.get_partition._session - session2 = client2.transport.get_partition._session - assert session1 != session2 - session1 = client1.transport.list_partitions._session - session2 = client2.transport.list_partitions._session - assert session1 != session2 -def test_metadata_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetadataServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_metadata_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetadataServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) -def test_metadata_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
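# Editorial aside, not part of the generated diff: the mTLS wiring pinned down
# in the surrounding tests, reduced to its core. A client certificate source is
# a callback returning (cert_bytes, key_bytes); when no explicit
# ssl_channel_credentials object is supplied, the transport feeds that pair to
# grpc.ssl_channel_credentials(). A hedged sketch; the callback below is
# hypothetical.
import grpc


def _hypothetical_cert_source():
    return b"cert bytes", b"key bytes"


def _sketch_mtls_channel_credentials():
    certificate_chain, private_key = _hypothetical_cert_source()
    return grpc.ssl_channel_credentials(
        certificate_chain=certificate_chain, private_key=private_key,
    )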
-@pytest.mark.parametrize("transport_class", [transports.MetadataServiceGrpcTransport, transports.MetadataServiceGrpcAsyncIOTransport]) -def test_metadata_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_entity_path(): - project = "squid" - location = "clam" - lake = "whelk" - zone = "octopus" - entity = "oyster" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, ) - actual = MetadataServiceClient.entity_path(project, location, lake, zone, entity) - assert expected == actual - - -def test_parse_entity_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "lake": "mussel", - "zone": "winkle", - "entity": "nautilus", - } - path = MetadataServiceClient.entity_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_entity_path(path) - assert expected == actual - -def test_partition_path(): - project = "scallop" - location = "abalone" - lake = "squid" - zone = "clam" - entity = "whelk" - partition = "octopus" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}/partitions/{partition}".format(project=project, location=location, lake=lake, zone=zone, entity=entity, partition=partition, ) - actual = MetadataServiceClient.partition_path(project, location, lake, zone, entity, partition) - assert expected == actual - - -def test_parse_partition_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "lake": "cuttlefish", - "zone": "mussel", - "entity": "winkle", - "partition": "nautilus", - } - path = MetadataServiceClient.partition_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_partition_path(path) - assert expected == actual - -def test_zone_path(): - project = "scallop" - location = "abalone" - lake = "squid" - zone = "clam" - expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format(project=project, location=location, lake=lake, zone=zone, ) - actual = MetadataServiceClient.zone_path(project, location, lake, zone) - assert expected == actual - - -def test_parse_zone_path(): - expected = { - "project": "whelk", - "location": "octopus", - "lake": "oyster", - "zone": "nudibranch", - } - path = MetadataServiceClient.zone_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetadataServiceClient.parse_zone_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetadataServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = MetadataServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetadataServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = MetadataServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetadataServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = MetadataServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = MetadataServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = MetadataServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MetadataServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetadataServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = MetadataServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
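# Editorial aside, not part of the generated diff: the path helpers verified
# above are plain string templates, and each parse_* method is the regex
# inverse of its builder. A self-contained sketch of the same round-trip idea,
# using the zone template from this file:
import re

_ZONE_TEMPLATE = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}"
_ZONE_PATTERN = re.compile(
    r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
    r"/lakes/(?P<lake>.+?)/zones/(?P<zone>.+?)$"
)


def _sketch_zone_path(project, location, lake, zone):
    return _ZONE_TEMPLATE.format(
        project=project, location=location, lake=lake, zone=zone
    )


def _sketch_parse_zone_path(path):
    m = _ZONE_PATTERN.match(path)
    return m.groupdict() if m else {}

# Round-trip: _sketch_parse_zone_path(_sketch_zone_path("p", "l", "k", "z"))
# yields {"project": "p", "location": "l", "lake": "k", "zone": "z"}.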
- actual = MetadataServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetadataServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MetadataServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
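# Editorial aside, not part of the generated diff: the
# ("x-goog-request-params", "name=locations") assertions in these tests check
# routing-header propagation. The header value is a URL-encoded field=value
# pair built from the request's resource name; a reduced, hedged sketch of
# that rule:
from urllib.parse import quote


def _sketch_routing_header(field, value):
    # e.g. _sketch_routing_header("name", "locations") -> "name=locations"
    return "{}={}".format(field, quote(str(value), safe=""))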
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
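# Editorial aside, not part of the generated diff: unlike the generated RPCs,
# the Operations mixin methods tested above (get/list/cancel/delete) take raw
# google.longrunning request messages rather than flattened arguments. A
# hedged usage sketch (the resource name is a placeholder):
from google.longrunning import operations_pb2


def _sketch_list_operations(client):
    request = operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/us-central1"  # placeholder
    )
    return client.list_operations(request=request)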
- request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
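# Editorial aside, not part of the generated diff: the *_from_dict variants in
# these tests confirm a plain dict is accepted wherever a request message is
# expected; the client coerces it into the message type. A sketch of what
# request={"name": "locations/abc"} becomes (hand-built, not generated code):
from google.cloud.location import locations_pb2

_coerced_request = locations_pb2.GetLocationRequest(name="locations/abc")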
- with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = MetadataServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = MetadataServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport), - (MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/business_glossary_service.rst similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/docs/dataplex_v1/business_glossary_service.rst rename to packages/google-cloud-dataplex/docs/dataplex_v1/business_glossary_service.rst diff --git a/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst index 4b62f5c74808..4f97a5efe7f1 100644 --- a/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst +++ b/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Dataplex v1 API .. 
toctree:: :maxdepth: 2 + business_glossary_service catalog_service cmek_service content_service diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py index 9f304058606c..c72588ce32f9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py @@ -18,6 +18,12 @@ __version__ = package_version.__version__ +from google.cloud.dataplex_v1.services.business_glossary_service.async_client import ( + BusinessGlossaryServiceAsyncClient, +) +from google.cloud.dataplex_v1.services.business_glossary_service.client import ( + BusinessGlossaryServiceClient, +) from google.cloud.dataplex_v1.services.catalog_service.async_client import ( CatalogServiceAsyncClient, ) @@ -59,6 +65,29 @@ MetadataServiceClient, ) from google.cloud.dataplex_v1.types.analyze import Content, Environment, Session +from google.cloud.dataplex_v1.types.business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from google.cloud.dataplex_v1.types.catalog import ( Aspect, AspectSource, @@ -66,20 +95,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -180,6 +213,9 @@ RunDataScanResponse, UpdateDataScanRequest, ) +from google.cloud.dataplex_v1.types.datascans_common import ( + DataScanCatalogPublishingStatus, +) from google.cloud.dataplex_v1.types.logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -267,6 +303,8 @@ from google.cloud.dataplex_v1.types.tasks import Job, Task __all__ = ( + "BusinessGlossaryServiceClient", + "BusinessGlossaryServiceAsyncClient", "CatalogServiceClient", "CatalogServiceAsyncClient", "CmekServiceClient", @@ -284,25 +322,50 @@ "Content", "Environment", "Session", + "CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "Aspect", "AspectSource", "AspectType", "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", 
"CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "Entry", "EntryGroup", + "EntryLink", "EntrySource", "EntryType", "GetAspectTypeRequest", "GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetMetadataJobRequest", @@ -389,6 +452,7 @@ "RunDataScanResponse", "UpdateDataScanRequest", "DataScanType", + "DataScanCatalogPublishingStatus", "BusinessGlossaryEvent", "DataQualityScanRuleResult", "DataScanEvent", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py index 2b4d6bc39dfd..104b6f66fc5d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.business_glossary_service import ( + BusinessGlossaryServiceAsyncClient, + BusinessGlossaryServiceClient, +) from .services.catalog_service import CatalogServiceAsyncClient, CatalogServiceClient from .services.cmek_service import CmekServiceAsyncClient, CmekServiceClient from .services.content_service import ContentServiceAsyncClient, ContentServiceClient @@ -32,6 +36,29 @@ from .services.dataplex_service import DataplexServiceAsyncClient, DataplexServiceClient from .services.metadata_service import MetadataServiceAsyncClient, MetadataServiceClient from .types.analyze import Content, Environment, Session +from .types.business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from .types.catalog import ( Aspect, AspectSource, @@ -39,20 +66,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -147,6 +178,7 @@ RunDataScanResponse, UpdateDataScanRequest, ) +from .types.datascans_common import DataScanCatalogPublishingStatus from .types.logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -227,6 +259,7 @@ from .types.tasks import Job, Task __all__ = ( + "BusinessGlossaryServiceAsyncClient", "CatalogServiceAsyncClient", "CmekServiceAsyncClient", "ContentServiceAsyncClient", @@ -241,6 +274,7 @@ "Asset", "AssetStatus", "BusinessGlossaryEvent", + "BusinessGlossaryServiceClient", "CancelJobRequest", "CancelMetadataJobRequest", "CatalogServiceClient", @@ -257,9 +291,13 @@ "CreateEncryptionConfigRequest", "CreateEntityRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", "CreateEnvironmentRequest", + 
"CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", "CreateLakeRequest", "CreateMetadataJobRequest", "CreatePartitionRequest", @@ -281,6 +319,7 @@ "DataQualityScanRuleResult", "DataQualitySpec", "DataScan", + "DataScanCatalogPublishingStatus", "DataScanEvent", "DataScanJob", "DataScanServiceClient", @@ -299,9 +338,13 @@ "DeleteEncryptionConfigRequest", "DeleteEntityRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "DeleteEnvironmentRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", "DeleteLakeRequest", "DeletePartitionRequest", "DeleteTaskRequest", @@ -311,6 +354,7 @@ "Entity", "Entry", "EntryGroup", + "EntryLink", "EntryLinkEvent", "EntrySource", "EntryType", @@ -329,15 +373,22 @@ "GetEncryptionConfigRequest", "GetEntityRequest", "GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetEnvironmentRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", "GetJobRequest", "GetLakeRequest", "GetMetadataJobRequest", "GetPartitionRequest", "GetTaskRequest", "GetZoneRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", "GovernanceEvent", "ImportItem", "Job", @@ -373,6 +424,12 @@ "ListEntryTypesResponse", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", "ListJobsRequest", "ListJobsResponse", "ListLakeActionsRequest", @@ -426,6 +483,9 @@ "UpdateEntryRequest", "UpdateEntryTypeRequest", "UpdateEnvironmentRequest", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "UpdateLakeRequest", "UpdateTaskRequest", "UpdateZoneRequest", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json index f9cbea29b093..dd1090ea5c40 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json @@ -5,6 +5,250 @@ "protoPackage": "google.cloud.dataplex.v1", "schema": "1.0", "services": { + "BusinessGlossaryService": { + "clients": { + "grpc": { + "libraryClient": "BusinessGlossaryServiceClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + 
"methods": [ + "update_glossary_term" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BusinessGlossaryServiceAsyncClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + }, + "rest": { + "libraryClient": "BusinessGlossaryServiceClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + } + } + }, "CatalogService": { "clients": { "grpc": { @@ -30,6 +274,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -55,6 +304,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ "delete_entry_type" @@ -75,6 +329,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" @@ -165,6 +424,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -190,6 +454,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ "delete_entry_type" @@ -210,6 +479,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" @@ -300,6 +574,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": 
[ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -325,6 +604,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ "delete_entry_type" @@ -345,6 +629,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py similarity index 90% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py index c9e791266c6a..6adb2000e4a8 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import BusinessGlossaryServiceClient from .async_client import BusinessGlossaryServiceAsyncClient +from .client import BusinessGlossaryServiceClient __all__ = ( - 'BusinessGlossaryServiceClient', - 'BusinessGlossaryServiceAsyncClient', + "BusinessGlossaryServiceClient", + "BusinessGlossaryServiceAsyncClient", ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py similarity index 82% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py index 576a6fd46a8f..31f7db5c5c13 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py @@ -13,21 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging as std_logging from collections import OrderedDict +import logging as std_logging import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataplex_v1 import gapic_version as package_version +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.dataplex_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -36,28 +46,31 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.business_glossary_service import pagers -from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.dataplex_v1.types import service -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport + +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.types import business_glossary, service + from .client import BusinessGlossaryServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport +from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class BusinessGlossaryServiceAsyncClient: """BusinessGlossaryService provides APIs for managing business glossary resources for enterprise customers. @@ -74,25 +87,53 @@ class BusinessGlossaryServiceAsyncClient: # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_ENDPOINT_TEMPLATE = ( + BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) _DEFAULT_UNIVERSE = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE glossary_path = staticmethod(BusinessGlossaryServiceClient.glossary_path) - parse_glossary_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_path) - glossary_category_path = staticmethod(BusinessGlossaryServiceClient.glossary_category_path) - parse_glossary_category_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_category_path) + parse_glossary_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_path + ) + glossary_category_path = staticmethod( + BusinessGlossaryServiceClient.glossary_category_path + ) + parse_glossary_category_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_category_path + ) glossary_term_path = staticmethod(BusinessGlossaryServiceClient.glossary_term_path) - parse_glossary_term_path = staticmethod(BusinessGlossaryServiceClient.parse_glossary_term_path) - common_billing_account_path = staticmethod(BusinessGlossaryServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BusinessGlossaryServiceClient.parse_common_billing_account_path) + parse_glossary_term_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_term_path + ) + common_billing_account_path = staticmethod( + BusinessGlossaryServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(BusinessGlossaryServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(BusinessGlossaryServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(BusinessGlossaryServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(BusinessGlossaryServiceClient.parse_common_organization_path) - common_project_path = staticmethod(BusinessGlossaryServiceClient.common_project_path) - parse_common_project_path = staticmethod(BusinessGlossaryServiceClient.parse_common_project_path) - common_location_path = staticmethod(BusinessGlossaryServiceClient.common_location_path) - parse_common_location_path = staticmethod(BusinessGlossaryServiceClient.parse_common_location_path) + parse_common_folder_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BusinessGlossaryServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + BusinessGlossaryServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + BusinessGlossaryServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -128,7 +169,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def 
get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -191,12 +234,20 @@ def universe_domain(self) -> str: get_transport_class = BusinessGlossaryServiceClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BusinessGlossaryServiceTransport, Callable[..., BusinessGlossaryServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessGlossaryServiceTransport, + Callable[..., BusinessGlossaryServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the business glossary service async client. Args: @@ -251,33 +302,41 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "credentialsType": None, - } + }, ) - async def create_glossary(self, - request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, - *, - parent: Optional[str] = None, - glossary: Optional[business_glossary.Glossary] = None, - glossary_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_glossary( + self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a new Glossary resource. .. 
code-block:: python @@ -357,10 +416,14 @@ async def sample_create_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, glossary, glossary_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -378,14 +441,14 @@ async def sample_create_glossary(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -410,15 +473,16 @@ async def sample_create_glossary(): # Done; return the response. return response - async def update_glossary(self, - request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, - *, - glossary: Optional[business_glossary.Glossary] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_glossary( + self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates a Glossary resource. .. code-block:: python @@ -491,10 +555,14 @@ async def sample_update_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [glossary, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -510,14 +578,16 @@ async def sample_update_glossary(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
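
As the signatures above show, create_glossary resolves to an operation_async.AsyncOperation rather than to the Glossary itself. A minimal usage sketch, assuming hypothetical project and glossary IDs:

.. code-block:: python

    from google.cloud import dataplex_v1

    async def sample_create_glossary():
        client = dataplex_v1.BusinessGlossaryServiceAsyncClient()

        # All resource names below are hypothetical placeholders.
        operation = await client.create_glossary(
            parent="projects/my-project/locations/us-central1",
            glossary=dataplex_v1.Glossary(),  # required fields omitted for brevity
            glossary_id="finance",
        )

        # AsyncOperation.result() is itself a coroutine.
        glossary = await operation.result()
        print(glossary.name)
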
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("glossary.name", request.glossary.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), ) # Validate the universe domain. @@ -542,14 +612,15 @@ async def sample_update_glossary(): # Done; return the response. return response - async def delete_glossary(self, - request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_glossary( + self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a Glossary resource. All the categories and terms within the Glossary must be deleted before the Glossary can be deleted. @@ -622,10 +693,14 @@ async def sample_delete_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -639,14 +714,14 @@ async def sample_delete_glossary(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_glossary + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -671,14 +746,15 @@ async def sample_delete_glossary(): # Done; return the response. 
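
update_glossary pairs the modified Glossary with a FieldMask naming the fields to overwrite, and delete_glossary only succeeds once the glossary contains no categories or terms; both return long-running operations. A sketch, assuming a display_name field on Glossary (not shown in this hunk):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    glossary.display_name = "Finance (EMEA)"  # field assumed for illustration
    operation = await client.update_glossary(
        glossary=glossary,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    glossary = await operation.result()
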
return response - async def get_glossary(self, - request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.Glossary: + async def get_glossary( + self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: r"""Gets a Glossary resource. .. code-block:: python @@ -740,10 +816,14 @@ async def sample_get_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -757,14 +837,14 @@ async def sample_get_glossary(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -781,14 +861,15 @@ async def sample_get_glossary(): # Done; return the response. return response - async def list_glossaries(self, - request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossariesAsyncPager: + async def list_glossaries( + self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesAsyncPager: r"""Lists Glossary resources in a project and location. .. code-block:: python @@ -851,10 +932,14 @@ async def sample_list_glossaries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -868,14 +953,14 @@ async def sample_list_glossaries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossaries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_glossaries + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -903,16 +988,19 @@ async def sample_list_glossaries(): # Done; return the response. return response - async def create_glossary_category(self, - request: Optional[Union[business_glossary.CreateGlossaryCategoryRequest, dict]] = None, - *, - parent: Optional[str] = None, - category: Optional[business_glossary.GlossaryCategory] = None, - category_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + async def create_glossary_category( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryCategoryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Creates a new GlossaryCategory resource. .. code-block:: python @@ -993,10 +1081,14 @@ async def sample_create_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, category, category_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1014,14 +1106,14 @@ async def sample_create_glossary_category(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
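
list_glossaries resolves to a ListGlossariesAsyncPager that fetches further pages on demand. A minimal sketch with a hypothetical parent:

.. code-block:: python

    pager = await client.list_glossaries(
        parent="projects/my-project/locations/us-central1"
    )
    async for glossary in pager:  # additional ListGlossaries calls happen lazily
        print(glossary.name)
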
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary_category] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary_category + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1038,15 +1130,18 @@ async def sample_create_glossary_category(): # Done; return the response. return response - async def update_glossary_category(self, - request: Optional[Union[business_glossary.UpdateGlossaryCategoryRequest, dict]] = None, - *, - category: Optional[business_glossary.GlossaryCategory] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + async def update_glossary_category( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryCategoryRequest, dict] + ] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Updates a GlossaryCategory resource. .. code-block:: python @@ -1117,10 +1212,14 @@ async def sample_update_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [category, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1136,14 +1235,16 @@ async def sample_update_glossary_category(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary_category] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary_category + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("category.name", request.category.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("category.name", request.category.name),) + ), ) # Validate the universe domain. @@ -1160,14 +1261,17 @@ async def sample_update_glossary_category(): # Done; return the response. 
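
Unlike the Glossary methods, create_glossary_category is a plain unary call that resolves directly to the new GlossaryCategory rather than to an operation. A sketch with hypothetical identifiers:

.. code-block:: python

    category = await client.create_glossary_category(
        parent="projects/my-project/locations/us-central1/glossaries/finance",
        category=dataplex_v1.GlossaryCategory(),  # required fields omitted for brevity
        category_id="accounting",
    )
    print(category.name)
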
return response - async def delete_glossary_category(self, - request: Optional[Union[business_glossary.DeleteGlossaryCategoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_glossary_category( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a GlossaryCategory resource. All the GlossaryCategories and GlossaryTerms nested directly under the specified GlossaryCategory will be moved one @@ -1219,10 +1323,14 @@ async def sample_delete_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1236,14 +1344,14 @@ async def sample_delete_glossary_category(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary_category] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_glossary_category + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1257,14 +1365,17 @@ async def sample_delete_glossary_category(): metadata=metadata, ) - async def get_glossary_category(self, - request: Optional[Union[business_glossary.GetGlossaryCategoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + async def get_glossary_category( + self, + request: Optional[ + Union[business_glossary.GetGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Gets a GlossaryCategory resource. .. code-block:: python @@ -1324,10 +1435,14 @@ async def sample_get_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1341,14 +1456,14 @@ async def sample_get_glossary_category(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary_category] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary_category + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1365,14 +1480,17 @@ async def sample_get_glossary_category(): # Done; return the response. return response - async def list_glossary_categories(self, - request: Optional[Union[business_glossary.ListGlossaryCategoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossaryCategoriesAsyncPager: + async def list_glossary_categories( + self, + request: Optional[ + Union[business_glossary.ListGlossaryCategoriesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesAsyncPager: r"""Lists GlossaryCategory resources in a Glossary. .. code-block:: python @@ -1435,10 +1553,14 @@ async def sample_list_glossary_categories(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1452,14 +1574,14 @@ async def sample_list_glossary_categories(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossary_categories] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_glossary_categories + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1487,16 +1609,19 @@ async def sample_list_glossary_categories(): # Done; return the response. return response - async def create_glossary_term(self, - request: Optional[Union[business_glossary.CreateGlossaryTermRequest, dict]] = None, - *, - parent: Optional[str] = None, - term: Optional[business_glossary.GlossaryTerm] = None, - term_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + async def create_glossary_term( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryTermRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Creates a new GlossaryTerm resource. .. code-block:: python @@ -1574,10 +1699,14 @@ async def sample_create_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, term, term_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1595,14 +1724,14 @@ async def sample_create_glossary_term(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_glossary_term] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary_term + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1619,15 +1748,18 @@ async def sample_create_glossary_term(): # Done; return the response. 
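
GlossaryTerm CRUD mirrors the category methods: create_glossary_term also resolves directly to the created resource. A sketch with hypothetical names:

.. code-block:: python

    term = await client.create_glossary_term(
        parent="projects/my-project/locations/us-central1/glossaries/finance",
        term=dataplex_v1.GlossaryTerm(),  # required fields omitted for brevity
        term_id="accrual",
    )
    print(term.name)
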
return response - async def update_glossary_term(self, - request: Optional[Union[business_glossary.UpdateGlossaryTermRequest, dict]] = None, - *, - term: Optional[business_glossary.GlossaryTerm] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + async def update_glossary_term( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryTermRequest, dict] + ] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Updates a GlossaryTerm resource. .. code-block:: python @@ -1699,10 +1831,14 @@ async def sample_update_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [term, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1718,14 +1854,16 @@ async def sample_update_glossary_term(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_glossary_term] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary_term + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("term.name", request.term.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("term.name", request.term.name),) + ), ) # Validate the universe domain. @@ -1742,14 +1880,17 @@ async def sample_update_glossary_term(): # Done; return the response. return response - async def delete_glossary_term(self, - request: Optional[Union[business_glossary.DeleteGlossaryTermRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_glossary_term( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryTermRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a GlossaryTerm resource. .. 
code-block:: python @@ -1798,10 +1939,14 @@ async def sample_delete_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1815,14 +1960,14 @@ async def sample_delete_glossary_term(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_glossary_term] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_glossary_term + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1836,14 +1981,15 @@ async def sample_delete_glossary_term(): metadata=metadata, ) - async def get_glossary_term(self, - request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + async def get_glossary_term( + self, + request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Gets a GlossaryTerm resource. .. code-block:: python @@ -1904,10 +2050,14 @@ async def sample_get_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1921,14 +2071,14 @@ async def sample_get_glossary_term(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_glossary_term] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary_term + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1945,14 +2095,17 @@ async def sample_get_glossary_term(): # Done; return the response. return response - async def list_glossary_terms(self, - request: Optional[Union[business_glossary.ListGlossaryTermsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossaryTermsAsyncPager: + async def list_glossary_terms( + self, + request: Optional[ + Union[business_glossary.ListGlossaryTermsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsAsyncPager: r"""Lists GlossaryTerm resources in a Glossary. .. code-block:: python @@ -2015,10 +2168,14 @@ async def sample_list_glossary_terms(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2032,14 +2189,14 @@ async def sample_list_glossary_terms(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_glossary_terms] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_glossary_terms + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2105,8 +2262,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2114,7 +2270,11 @@ async def list_operations( # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -2157,8 +2317,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2166,7 +2325,11 @@ async def get_operation( # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2213,15 +2376,19 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -2264,15 +2431,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def get_location( self, @@ -2312,8 +2483,7 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2321,7 +2491,11 @@ async def get_location( # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2364,8 +2538,7 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2373,7 +2546,11 @@ async def list_locations( # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
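
The operations mixins above (list_operations, get_operation, delete_operation, cancel_operation) can poll or cancel the glossary long-running operations directly. A sketch with a hypothetical operation name:

.. code-block:: python

    from google.longrunning import operations_pb2

    op = await client.get_operation(
        request=operations_pb2.GetOperationRequest(
            name="projects/my-project/locations/us-central1/operations/op-123"
        )
    )
    print(op.done)
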
return response @@ -2384,12 +2561,13 @@ async def __aenter__(self) -> "BusinessGlossaryServiceAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BusinessGlossaryServiceAsyncClient", -) +__all__ = ("BusinessGlossaryServiceAsyncClient",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py similarity index 82% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py index 7aa0a15b39b1..3900368df428 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py @@ -19,22 +19,34 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.dataplex_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.dataplex_v1 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -50,17 +63,18 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.dataplex_v1.services.business_glossary_service import pagers -from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.dataplex_v1.types import service -from google.cloud.location import 
locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO + +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.types import business_glossary, service + +from .transports.base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport from .transports.grpc import BusinessGlossaryServiceGrpcTransport from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport from .transports.rest import BusinessGlossaryServiceRestTransport @@ -73,14 +87,18 @@ class BusinessGlossaryServiceClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BusinessGlossaryServiceTransport]] + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[BusinessGlossaryServiceTransport]] _transport_registry["grpc"] = BusinessGlossaryServiceGrpcTransport _transport_registry["grpc_asyncio"] = BusinessGlossaryServiceGrpcAsyncIOTransport _transport_registry["rest"] = BusinessGlossaryServiceRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BusinessGlossaryServiceTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BusinessGlossaryServiceTransport]: """Returns an appropriate transport class. Args: @@ -179,8 +197,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: BusinessGlossaryServiceClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -197,95 +214,156 @@ def transport(self) -> BusinessGlossaryServiceTransport: return self._transport @staticmethod - def glossary_path(project: str,location: str,glossary: str,) -> str: + def glossary_path( + project: str, + location: str, + glossary: str, + ) -> str: """Returns a fully-qualified glossary string.""" - return "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) + return "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) @staticmethod - def parse_glossary_path(path: str) -> Dict[str,str]: + def parse_glossary_path(path: str) -> Dict[str, str]: """Parses a glossary path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def glossary_category_path(project: str,location: str,glossary: str,glossary_category: str,) -> str: + def glossary_category_path( + project: str, + location: str, + glossary: str, + glossary_category: str, + ) -> str: """Returns a fully-qualified glossary_category string.""" - return "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format(project=project, location=location, glossary=glossary, glossary_category=glossary_category, ) + return "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format( + project=project, + location=location, + glossary=glossary, + glossary_category=glossary_category, + ) @staticmethod - def parse_glossary_category_path(path: str) -> Dict[str,str]: + def parse_glossary_category_path(path: str) -> Dict[str, str]: """Parses a glossary_category path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/categories/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/categories/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def glossary_term_path(project: str,location: str,glossary: str,glossary_term: str,) -> str: + def glossary_term_path( + project: str, + location: str, + glossary: str, + glossary_term: str, + ) -> str: """Returns a fully-qualified glossary_term string.""" - return "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format(project=project, location=location, glossary=glossary, glossary_term=glossary_term, ) + return "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format( + project=project, + location=location, + glossary=glossary, + glossary_term=glossary_term, + ) @staticmethod - def parse_glossary_term_path(path: str) -> Dict[str,str]: + def parse_glossary_term_path(path: str) -> Dict[str, str]: """Parses a glossary_term path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/terms/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/terms/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> 
str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order: @@ -317,16 +395,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -339,7 +423,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -360,13 +446,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -389,7 +481,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. 
Args: @@ -405,17 +499,27 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = ( + BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -451,15 +555,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -492,12 +599,20 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BusinessGlossaryServiceTransport, Callable[..., BusinessGlossaryServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessGlossaryServiceTransport, + Callable[..., BusinessGlossaryServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the business glossary service client. 
Args: @@ -552,14 +667,26 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BusinessGlossaryServiceClient._read_environment_variables() - self._client_cert_source = BusinessGlossaryServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BusinessGlossaryServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BusinessGlossaryServiceClient._read_environment_variables() + self._client_cert_source = ( + BusinessGlossaryServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = BusinessGlossaryServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -570,7 +697,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -579,8 +708,10 @@ def __init__(self, *, if transport_provided: # transport is a BusinessGlossaryServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -589,20 +720,30 @@ def __init__(self, *, self._transport = cast(BusinessGlossaryServiceTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - BusinessGlossaryServiceClient._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or BusinessGlossaryServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[BusinessGlossaryServiceTransport], Callable[..., BusinessGlossaryServiceTransport]] = ( + transport_init: Union[ + Type[BusinessGlossaryServiceTransport], + Callable[..., BusinessGlossaryServiceTransport], + ] = ( BusinessGlossaryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., BusinessGlossaryServiceTransport], transport) @@ -621,30 +762,39 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceClient`.", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "credentialsType": None, - } + }, ) - def create_glossary(self, - request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, - *, - parent: Optional[str] = None, - glossary: Optional[business_glossary.Glossary] = None, - glossary_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_glossary( + self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a new Glossary resource. .. 
code-block:: python @@ -724,10 +874,14 @@ def sample_create_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, glossary, glossary_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -749,9 +903,7 @@ def sample_create_glossary(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -776,15 +928,16 @@ def sample_create_glossary(): # Done; return the response. return response - def update_glossary(self, - request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, - *, - glossary: Optional[business_glossary.Glossary] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_glossary( + self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates a Glossary resource. .. code-block:: python @@ -857,10 +1010,14 @@ def sample_update_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [glossary, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -880,9 +1037,9 @@ def sample_update_glossary(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("glossary.name", request.glossary.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), ) # Validate the universe domain. @@ -907,14 +1064,15 @@ def sample_update_glossary(): # Done; return the response. 
return response - def delete_glossary(self, - request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_glossary( + self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a Glossary resource. All the categories and terms within the Glossary must be deleted before the Glossary can be deleted. @@ -987,10 +1145,14 @@ def sample_delete_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1008,9 +1170,7 @@ def sample_delete_glossary(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1035,14 +1195,15 @@ def sample_delete_glossary(): # Done; return the response. return response - def get_glossary(self, - request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.Glossary: + def get_glossary( + self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: r"""Gets a Glossary resource. .. code-block:: python @@ -1104,10 +1265,14 @@ def sample_get_glossary(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1125,9 +1290,7 @@ def sample_get_glossary(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1144,14 +1307,15 @@ def sample_get_glossary(): # Done; return the response. return response - def list_glossaries(self, - request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossariesPager: + def list_glossaries( + self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesPager: r"""Lists Glossary resources in a project and location. .. code-block:: python @@ -1214,10 +1378,14 @@ def sample_list_glossaries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1235,9 +1403,7 @@ def sample_list_glossaries(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1265,16 +1431,19 @@ def sample_list_glossaries(): # Done; return the response. 
return response - def create_glossary_category(self, - request: Optional[Union[business_glossary.CreateGlossaryCategoryRequest, dict]] = None, - *, - parent: Optional[str] = None, - category: Optional[business_glossary.GlossaryCategory] = None, - category_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + def create_glossary_category( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryCategoryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Creates a new GlossaryCategory resource. .. code-block:: python @@ -1355,10 +1524,14 @@ def sample_create_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, category, category_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1380,9 +1553,7 @@ def sample_create_glossary_category(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1399,15 +1570,18 @@ def sample_create_glossary_category(): # Done; return the response. return response - def update_glossary_category(self, - request: Optional[Union[business_glossary.UpdateGlossaryCategoryRequest, dict]] = None, - *, - category: Optional[business_glossary.GlossaryCategory] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + def update_glossary_category( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryCategoryRequest, dict] + ] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Updates a GlossaryCategory resource. .. 
code-block:: python @@ -1478,10 +1652,14 @@ def sample_update_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [category, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1501,9 +1679,9 @@ def sample_update_glossary_category(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("category.name", request.category.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("category.name", request.category.name),) + ), ) # Validate the universe domain. @@ -1520,14 +1698,17 @@ def sample_update_glossary_category(): # Done; return the response. return response - def delete_glossary_category(self, - request: Optional[Union[business_glossary.DeleteGlossaryCategoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_glossary_category( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a GlossaryCategory resource. All the GlossaryCategories and GlossaryTerms nested directly under the specified GlossaryCategory will be moved one @@ -1579,10 +1760,14 @@ def sample_delete_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1600,9 +1785,7 @@ def sample_delete_glossary_category(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1616,14 +1799,17 @@ def sample_delete_glossary_category(): metadata=metadata, ) - def get_glossary_category(self, - request: Optional[Union[business_glossary.GetGlossaryCategoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryCategory: + def get_glossary_category( + self, + request: Optional[ + Union[business_glossary.GetGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Gets a GlossaryCategory resource. .. code-block:: python @@ -1683,10 +1869,14 @@ def sample_get_glossary_category(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1704,9 +1894,7 @@ def sample_get_glossary_category(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1723,14 +1911,17 @@ def sample_get_glossary_category(): # Done; return the response. return response - def list_glossary_categories(self, - request: Optional[Union[business_glossary.ListGlossaryCategoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossaryCategoriesPager: + def list_glossary_categories( + self, + request: Optional[ + Union[business_glossary.ListGlossaryCategoriesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesPager: r"""Lists GlossaryCategory resources in a Glossary. .. code-block:: python @@ -1793,10 +1984,14 @@ def sample_list_glossary_categories(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1814,9 +2009,7 @@ def sample_list_glossary_categories(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1844,16 +2037,19 @@ def sample_list_glossary_categories(): # Done; return the response. return response - def create_glossary_term(self, - request: Optional[Union[business_glossary.CreateGlossaryTermRequest, dict]] = None, - *, - parent: Optional[str] = None, - term: Optional[business_glossary.GlossaryTerm] = None, - term_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + def create_glossary_term( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryTermRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Creates a new GlossaryTerm resource. .. code-block:: python @@ -1931,10 +2127,14 @@ def sample_create_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, term, term_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1956,9 +2156,7 @@ def sample_create_glossary_term(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1975,15 +2173,18 @@ def sample_create_glossary_term(): # Done; return the response. 
return response - def update_glossary_term(self, - request: Optional[Union[business_glossary.UpdateGlossaryTermRequest, dict]] = None, - *, - term: Optional[business_glossary.GlossaryTerm] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + def update_glossary_term( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryTermRequest, dict] + ] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Updates a GlossaryTerm resource. .. code-block:: python @@ -2055,10 +2256,14 @@ def sample_update_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [term, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2078,9 +2283,9 @@ def sample_update_glossary_term(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("term.name", request.term.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("term.name", request.term.name),) + ), ) # Validate the universe domain. @@ -2097,14 +2302,17 @@ def sample_update_glossary_term(): # Done; return the response. return response - def delete_glossary_term(self, - request: Optional[Union[business_glossary.DeleteGlossaryTermRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_glossary_term( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryTermRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a GlossaryTerm resource. .. code-block:: python @@ -2153,10 +2361,14 @@ def sample_delete_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2174,9 +2386,7 @@ def sample_delete_glossary_term(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2190,14 +2400,15 @@ def sample_delete_glossary_term(): metadata=metadata, ) - def get_glossary_term(self, - request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> business_glossary.GlossaryTerm: + def get_glossary_term( + self, + request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Gets a GlossaryTerm resource. .. code-block:: python @@ -2258,10 +2469,14 @@ def sample_get_glossary_term(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2279,9 +2494,7 @@ def sample_get_glossary_term(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2298,14 +2511,17 @@ def sample_get_glossary_term(): # Done; return the response. 
return response - def list_glossary_terms(self, - request: Optional[Union[business_glossary.ListGlossaryTermsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListGlossaryTermsPager: + def list_glossary_terms( + self, + request: Optional[ + Union[business_glossary.ListGlossaryTermsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsPager: r"""Lists GlossaryTerm resources in a Glossary. .. code-block:: python @@ -2368,10 +2584,14 @@ def sample_list_glossary_terms(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2389,9 +2609,7 @@ def sample_list_glossary_terms(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2470,8 +2688,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2480,7 +2697,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2526,8 +2747,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2536,7 +2756,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2586,15 +2810,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -2637,15 +2865,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -2685,8 +2917,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2695,7 +2926,11 @@ def get_location( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2741,8 +2976,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2751,7 +2985,11 @@ def list_locations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2760,11 +2998,11 @@ def list_locations( raise e -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BusinessGlossaryServiceClient", -) +__all__ = ("BusinessGlossaryServiceClient",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py similarity index 77% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py index 7d055c9b172a..96480e5393cd 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListGlossariesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., business_glossary.ListGlossariesResponse], - request: business_glossary.ListGlossariesRequest, - response: business_glossary.ListGlossariesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossariesResponse], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[business_glossary.ListGlossariesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[business_glossary.Glossary]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[business_glossary.Glossary]: yield from page.glossaries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGlossariesAsyncPager: @@ -112,14 +134,17 @@ class ListGlossariesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
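A minimal consumption sketch (resource name hypothetical; the pager is
normally obtained from ``BusinessGlossaryServiceAsyncClient.list_glossaries``):

.. code-block:: python

    from google.cloud import dataplex_v1

    async def sample_iterate_glossaries():
        client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
        pager = await client.list_glossaries(
            parent="projects/my-project/locations/us-central1"
        )
        # ``__aiter__`` fetches follow-up pages on demand.
        async for glossary in pager:
            print(glossary.name)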
""" - def __init__(self, - method: Callable[..., Awaitable[business_glossary.ListGlossariesResponse]], - request: business_glossary.ListGlossariesRequest, - response: business_glossary.ListGlossariesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[business_glossary.ListGlossariesResponse]], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[business_glossary.ListGlossariesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.Glossary]: async def async_generator(): async for page in self.pages: @@ -163,7 +194,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGlossaryCategoriesPager: @@ -183,14 +214,17 @@ class ListGlossaryCategoriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., business_glossary.ListGlossaryCategoriesResponse], - request: business_glossary.ListGlossaryCategoriesRequest, - response: business_glossary.ListGlossaryCategoriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossaryCategoriesResponse], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. 
Args: @@ -223,7 +257,12 @@ def pages(self) -> Iterator[business_glossary.ListGlossaryCategoriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[business_glossary.GlossaryCategory]: @@ -231,7 +270,7 @@ def __iter__(self) -> Iterator[business_glossary.GlossaryCategory]: yield from page.categories def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGlossaryCategoriesAsyncPager: @@ -251,14 +290,19 @@ class ListGlossaryCategoriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[business_glossary.ListGlossaryCategoriesResponse]], - request: business_glossary.ListGlossaryCategoriesRequest, - response: business_glossary.ListGlossaryCategoriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[business_glossary.ListGlossaryCategoriesResponse] + ], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -287,12 +331,20 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[business_glossary.ListGlossaryCategoriesResponse]: + async def pages( + self, + ) -> AsyncIterator[business_glossary.ListGlossaryCategoriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryCategory]: async def async_generator(): async for page in self.pages: @@ -302,7 +354,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGlossaryTermsPager: @@ -322,14 +374,17 @@ class ListGlossaryTermsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
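The ``pages`` property below is equivalent to issuing the request in a loop
and threading ``next_page_token`` through by hand; a sketch:

.. code-block:: python

    response = await method(request)
    while True:
        # ... consume response.categories ...
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token
        response = await method(request)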
""" - def __init__(self, - method: Callable[..., business_glossary.ListGlossaryTermsResponse], - request: business_glossary.ListGlossaryTermsRequest, - response: business_glossary.ListGlossaryTermsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossaryTermsResponse], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -362,7 +417,12 @@ def pages(self) -> Iterator[business_glossary.ListGlossaryTermsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[business_glossary.GlossaryTerm]: @@ -370,7 +430,7 @@ def __iter__(self) -> Iterator[business_glossary.GlossaryTerm]: yield from page.terms def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGlossaryTermsAsyncPager: @@ -390,14 +450,17 @@ class ListGlossaryTermsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[business_glossary.ListGlossaryTermsResponse]], - request: business_glossary.ListGlossaryTermsRequest, - response: business_glossary.ListGlossaryTermsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[business_glossary.ListGlossaryTermsResponse]], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -430,8 +493,14 @@ async def pages(self) -> AsyncIterator[business_glossary.ListGlossaryTermsRespon yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryTerm]: async def async_generator(): async for page in self.pages: @@ -441,4 +510,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py similarity index 59% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py index 552b11ee5625..62b48ea0365b 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py @@ -19,20 +19,23 @@ from .base import BusinessGlossaryServiceTransport from .grpc import BusinessGlossaryServiceGrpcTransport from .grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport -from .rest import BusinessGlossaryServiceRestTransport -from .rest import BusinessGlossaryServiceRestInterceptor - +from .rest import ( + BusinessGlossaryServiceRestInterceptor, + BusinessGlossaryServiceRestTransport, +) # Compile a registry of transports. 
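# A minimal sketch of how this registry is consumed (the key comes from the
# client's ``transport`` argument):
#
#     transport_cls = _transport_registry["grpc"]
#     transport = transport_cls(host="dataplex.googleapis.com")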
-_transport_registry = OrderedDict() # type: Dict[str, Type[BusinessGlossaryServiceTransport]] -_transport_registry['grpc'] = BusinessGlossaryServiceGrpcTransport -_transport_registry['grpc_asyncio'] = BusinessGlossaryServiceGrpcAsyncIOTransport -_transport_registry['rest'] = BusinessGlossaryServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[BusinessGlossaryServiceTransport]] +_transport_registry["grpc"] = BusinessGlossaryServiceGrpcTransport +_transport_registry["grpc_asyncio"] = BusinessGlossaryServiceGrpcAsyncIOTransport +_transport_registry["rest"] = BusinessGlossaryServiceRestTransport __all__ = ( - 'BusinessGlossaryServiceTransport', - 'BusinessGlossaryServiceGrpcTransport', - 'BusinessGlossaryServiceGrpcAsyncIOTransport', - 'BusinessGlossaryServiceRestTransport', - 'BusinessGlossaryServiceRestInterceptor', + "BusinessGlossaryServiceTransport", + "BusinessGlossaryServiceGrpcTransport", + "BusinessGlossaryServiceGrpcAsyncIOTransport", + "BusinessGlossaryServiceRestTransport", + "BusinessGlossaryServiceRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py similarity index 63% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py index 4eab8f48345e..a3b303b5d5be 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py @@ -16,26 +16,26 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.dataplex_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf from google.protobuf import empty_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +from google.cloud.dataplex_v1 import gapic_version as package_version +from google.cloud.dataplex_v1.types import business_glossary + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER 
DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -44,24 +44,23 @@ class BusinessGlossaryServiceTransport(abc.ABC): """Abstract transport class for BusinessGlossaryService.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'dataplex.googleapis.com' + DEFAULT_HOST: str = "dataplex.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -97,30 +96,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -235,14 +242,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -252,138 +259,162 @@ def operations_client(self): raise NotImplementedError() @property - def create_glossary(self) -> Callable[ - [business_glossary.CreateGlossaryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_glossary( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_glossary(self) -> Callable[ - [business_glossary.UpdateGlossaryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_glossary( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_glossary(self) -> Callable[ - [business_glossary.DeleteGlossaryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_glossary( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_glossary(self) -> Callable[ - [business_glossary.GetGlossaryRequest], - Union[ - business_glossary.Glossary, - Awaitable[business_glossary.Glossary] - ]]: + def get_glossary( + self, + ) -> Callable[ + [business_glossary.GetGlossaryRequest], + Union[business_glossary.Glossary, Awaitable[business_glossary.Glossary]], + ]: raise NotImplementedError() @property - def list_glossaries(self) -> Callable[ - [business_glossary.ListGlossariesRequest], - Union[ - business_glossary.ListGlossariesResponse, - Awaitable[business_glossary.ListGlossariesResponse] - ]]: + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + Union[ + business_glossary.ListGlossariesResponse, + Awaitable[business_glossary.ListGlossariesResponse], + ], + ]: raise NotImplementedError() @property - def create_glossary_category(self) -> Callable[ - [business_glossary.CreateGlossaryCategoryRequest], - Union[ - business_glossary.GlossaryCategory, - Awaitable[business_glossary.GlossaryCategory] - ]]: + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: raise NotImplementedError() @property - def update_glossary_category(self) -> Callable[ - [business_glossary.UpdateGlossaryCategoryRequest], - Union[ - business_glossary.GlossaryCategory, - Awaitable[business_glossary.GlossaryCategory] - ]]: + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: raise NotImplementedError() @property - def delete_glossary_category(self) -> Callable[ - [business_glossary.DeleteGlossaryCategoryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_glossary_category( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def get_glossary_category(self) -> Callable[ - 
[business_glossary.GetGlossaryCategoryRequest], - Union[ - business_glossary.GlossaryCategory, - Awaitable[business_glossary.GlossaryCategory] - ]]: + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: raise NotImplementedError() @property - def list_glossary_categories(self) -> Callable[ - [business_glossary.ListGlossaryCategoriesRequest], - Union[ - business_glossary.ListGlossaryCategoriesResponse, - Awaitable[business_glossary.ListGlossaryCategoriesResponse] - ]]: + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Union[ + business_glossary.ListGlossaryCategoriesResponse, + Awaitable[business_glossary.ListGlossaryCategoriesResponse], + ], + ]: raise NotImplementedError() @property - def create_glossary_term(self) -> Callable[ - [business_glossary.CreateGlossaryTermRequest], - Union[ - business_glossary.GlossaryTerm, - Awaitable[business_glossary.GlossaryTerm] - ]]: + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: raise NotImplementedError() @property - def update_glossary_term(self) -> Callable[ - [business_glossary.UpdateGlossaryTermRequest], - Union[ - business_glossary.GlossaryTerm, - Awaitable[business_glossary.GlossaryTerm] - ]]: + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: raise NotImplementedError() @property - def delete_glossary_term(self) -> Callable[ - [business_glossary.DeleteGlossaryTermRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_glossary_term( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def get_glossary_term(self) -> Callable[ - [business_glossary.GetGlossaryTermRequest], - Union[ - business_glossary.GlossaryTerm, - Awaitable[business_glossary.GlossaryTerm] - ]]: + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: raise NotImplementedError() @property - def list_glossary_terms(self) -> Callable[ - [business_glossary.ListGlossaryTermsRequest], - Union[ - business_glossary.ListGlossaryTermsResponse, - Awaitable[business_glossary.ListGlossaryTermsResponse] - ]]: + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Union[ + business_glossary.ListGlossaryTermsResponse, + Awaitable[business_glossary.ListGlossaryTermsResponse], + ], + ]: raise NotImplementedError() @property @@ -391,7 +422,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -407,23 +441,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: 
raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -431,10 +460,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -443,6 +476,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'BusinessGlossaryServiceTransport', -) +__all__ = ("BusinessGlossaryServiceTransport",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py similarity index 78% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py index aa49382c72e8..0acb54a4bd42 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py @@ -16,31 +16,30 @@ import json import logging as std_logging import pickle -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore import proto # type: ignore from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -50,7 +49,9 @@ class 
_LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -71,7 +72,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -82,7 +83,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -97,7 +102,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": client_call_details.method, "response": grpc_response, @@ -125,23 +130,26 @@ class BusinessGlossaryServiceGrpcTransport(BusinessGlossaryServiceTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -264,19 +272,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -311,13 +323,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -337,9 +348,9 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def create_glossary(self) -> Callable[ - [business_glossary.CreateGlossaryRequest], - operations_pb2.Operation]: + def create_glossary( + self, + ) -> Callable[[business_glossary.CreateGlossaryRequest], operations_pb2.Operation]: r"""Return a callable for the create glossary method over gRPC. Creates a new Glossary resource. @@ -354,18 +365,18 @@ def create_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_glossary' not in self._stubs: - self._stubs['create_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary', + if "create_glossary" not in self._stubs: + self._stubs["create_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary", request_serializer=business_glossary.CreateGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_glossary'] + return self._stubs["create_glossary"] @property - def update_glossary(self) -> Callable[ - [business_glossary.UpdateGlossaryRequest], - operations_pb2.Operation]: + def update_glossary( + self, + ) -> Callable[[business_glossary.UpdateGlossaryRequest], operations_pb2.Operation]: r"""Return a callable for the update glossary method over gRPC. Updates a Glossary resource. @@ -380,18 +391,18 @@ def update_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
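# Every stub property in this transport follows the same lazily-cached
# pattern, sketched here with hypothetical names:
#
#     if "rpc_name" not in self._stubs:
#         self._stubs["rpc_name"] = channel.unary_unary(
#             "/google.cloud.dataplex.v1.BusinessGlossaryService/RpcName",
#             request_serializer=Request.serialize,
#             response_deserializer=Response.deserialize,
#         )
#     return self._stubs["rpc_name"]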
- if 'update_glossary' not in self._stubs: - self._stubs['update_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary', + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary", request_serializer=business_glossary.UpdateGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_glossary'] + return self._stubs["update_glossary"] @property - def delete_glossary(self) -> Callable[ - [business_glossary.DeleteGlossaryRequest], - operations_pb2.Operation]: + def delete_glossary( + self, + ) -> Callable[[business_glossary.DeleteGlossaryRequest], operations_pb2.Operation]: r"""Return a callable for the delete glossary method over gRPC. Deletes a Glossary resource. All the categories and @@ -408,18 +419,18 @@ def delete_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary' not in self._stubs: - self._stubs['delete_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary', + if "delete_glossary" not in self._stubs: + self._stubs["delete_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary", request_serializer=business_glossary.DeleteGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_glossary'] + return self._stubs["delete_glossary"] @property - def get_glossary(self) -> Callable[ - [business_glossary.GetGlossaryRequest], - business_glossary.Glossary]: + def get_glossary( + self, + ) -> Callable[[business_glossary.GetGlossaryRequest], business_glossary.Glossary]: r"""Return a callable for the get glossary method over gRPC. Gets a Glossary resource. @@ -434,18 +445,21 @@ def get_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_glossary' not in self._stubs: - self._stubs['get_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary', + if "get_glossary" not in self._stubs: + self._stubs["get_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary", request_serializer=business_glossary.GetGlossaryRequest.serialize, response_deserializer=business_glossary.Glossary.deserialize, ) - return self._stubs['get_glossary'] + return self._stubs["get_glossary"] @property - def list_glossaries(self) -> Callable[ - [business_glossary.ListGlossariesRequest], - business_glossary.ListGlossariesResponse]: + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse, + ]: r"""Return a callable for the list glossaries method over gRPC. Lists Glossary resources in a project and location. @@ -460,18 +474,21 @@ def list_glossaries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_glossaries' not in self._stubs: - self._stubs['list_glossaries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries', + if "list_glossaries" not in self._stubs: + self._stubs["list_glossaries"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries", request_serializer=business_glossary.ListGlossariesRequest.serialize, response_deserializer=business_glossary.ListGlossariesResponse.deserialize, ) - return self._stubs['list_glossaries'] + return self._stubs["list_glossaries"] @property - def create_glossary_category(self) -> Callable[ - [business_glossary.CreateGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: r"""Return a callable for the create glossary category method over gRPC. Creates a new GlossaryCategory resource. @@ -486,18 +503,21 @@ def create_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_glossary_category' not in self._stubs: - self._stubs['create_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory', + if "create_glossary_category" not in self._stubs: + self._stubs["create_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory", request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['create_glossary_category'] + return self._stubs["create_glossary_category"] @property - def update_glossary_category(self) -> Callable[ - [business_glossary.UpdateGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: r"""Return a callable for the update glossary category method over gRPC. Updates a GlossaryCategory resource. @@ -512,18 +532,18 @@ def update_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_glossary_category' not in self._stubs: - self._stubs['update_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory', + if "update_glossary_category" not in self._stubs: + self._stubs["update_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory", request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['update_glossary_category'] + return self._stubs["update_glossary_category"] @property - def delete_glossary_category(self) -> Callable[ - [business_glossary.DeleteGlossaryCategoryRequest], - empty_pb2.Empty]: + def delete_glossary_category( + self, + ) -> Callable[[business_glossary.DeleteGlossaryCategoryRequest], empty_pb2.Empty]: r"""Return a callable for the delete glossary category method over gRPC. Deletes a GlossaryCategory resource. 
All the @@ -541,18 +561,21 @@ def delete_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary_category' not in self._stubs: - self._stubs['delete_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory', + if "delete_glossary_category" not in self._stubs: + self._stubs["delete_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory", request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_glossary_category'] + return self._stubs["delete_glossary_category"] @property - def get_glossary_category(self) -> Callable[ - [business_glossary.GetGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: r"""Return a callable for the get glossary category method over gRPC. Gets a GlossaryCategory resource. @@ -567,18 +590,21 @@ def get_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_glossary_category' not in self._stubs: - self._stubs['get_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory', + if "get_glossary_category" not in self._stubs: + self._stubs["get_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory", request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['get_glossary_category'] + return self._stubs["get_glossary_category"] @property - def list_glossary_categories(self) -> Callable[ - [business_glossary.ListGlossaryCategoriesRequest], - business_glossary.ListGlossaryCategoriesResponse]: + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse, + ]: r"""Return a callable for the list glossary categories method over gRPC. Lists GlossaryCategory resources in a Glossary. @@ -593,18 +619,20 @@ def list_glossary_categories(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_glossary_categories' not in self._stubs: - self._stubs['list_glossary_categories'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories', + if "list_glossary_categories" not in self._stubs: + self._stubs["list_glossary_categories"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories", request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, ) - return self._stubs['list_glossary_categories'] + return self._stubs["list_glossary_categories"] @property - def create_glossary_term(self) -> Callable[ - [business_glossary.CreateGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: r"""Return a callable for the create glossary term method over gRPC. Creates a new GlossaryTerm resource. @@ -619,18 +647,20 @@ def create_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_glossary_term' not in self._stubs: - self._stubs['create_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm', + if "create_glossary_term" not in self._stubs: + self._stubs["create_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm", request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['create_glossary_term'] + return self._stubs["create_glossary_term"] @property - def update_glossary_term(self) -> Callable[ - [business_glossary.UpdateGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: r"""Return a callable for the update glossary term method over gRPC. Updates a GlossaryTerm resource. @@ -645,18 +675,18 @@ def update_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_glossary_term' not in self._stubs: - self._stubs['update_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm', + if "update_glossary_term" not in self._stubs: + self._stubs["update_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm", request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['update_glossary_term'] + return self._stubs["update_glossary_term"] @property - def delete_glossary_term(self) -> Callable[ - [business_glossary.DeleteGlossaryTermRequest], - empty_pb2.Empty]: + def delete_glossary_term( + self, + ) -> Callable[[business_glossary.DeleteGlossaryTermRequest], empty_pb2.Empty]: r"""Return a callable for the delete glossary term method over gRPC. Deletes a GlossaryTerm resource. @@ -671,18 +701,20 @@ def delete_glossary_term(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary_term' not in self._stubs: - self._stubs['delete_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm', + if "delete_glossary_term" not in self._stubs: + self._stubs["delete_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm", request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_glossary_term'] + return self._stubs["delete_glossary_term"] @property - def get_glossary_term(self) -> Callable[ - [business_glossary.GetGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], business_glossary.GlossaryTerm + ]: r"""Return a callable for the get glossary term method over gRPC. Gets a GlossaryTerm resource. @@ -697,18 +729,21 @@ def get_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_glossary_term' not in self._stubs: - self._stubs['get_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm', + if "get_glossary_term" not in self._stubs: + self._stubs["get_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm", request_serializer=business_glossary.GetGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['get_glossary_term'] + return self._stubs["get_glossary_term"] @property - def list_glossary_terms(self) -> Callable[ - [business_glossary.ListGlossaryTermsRequest], - business_glossary.ListGlossaryTermsResponse]: + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse, + ]: r"""Return a callable for the list glossary terms method over gRPC. Lists GlossaryTerm resources in a Glossary. @@ -723,13 +758,13 @@ def list_glossary_terms(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_glossary_terms' not in self._stubs: - self._stubs['list_glossary_terms'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms', + if "list_glossary_terms" not in self._stubs: + self._stubs["list_glossary_terms"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms", request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, ) - return self._stubs['list_glossary_terms'] + return self._stubs["list_glossary_terms"] def close(self): self._logged_channel.close() @@ -738,8 +773,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -756,8 +790,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -774,8 +807,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -791,9 +823,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -809,9 +842,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -828,8 +862,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
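# These mixin stubs back the corresponding client-level helpers; a usage
# sketch (location name hypothetical):
#
#     from google.cloud.location import locations_pb2
#
#     request = locations_pb2.GetLocationRequest(
#         name="projects/p/locations/us-central1"
#     )
#     location = client.get_location(request=request)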
# gRPC handles serialization and deserialization, so we just need @@ -847,6 +880,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'BusinessGlossaryServiceGrpcTransport', -) +__all__ = ("BusinessGlossaryServiceGrpcTransport",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py similarity index 79% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py index 2493fc37b685..76eb49de5442 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py @@ -15,36 +15,35 @@ # import inspect import json -import pickle import logging as std_logging -import warnings +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport from .grpc import BusinessGlossaryServiceGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,9 +51,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -75,7 +78,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -86,7 +89,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -101,7 +108,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -134,13 +141,15 @@ class BusinessGlossaryServiceGrpcAsyncIOTransport(BusinessGlossaryServiceTranspo _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -170,24 +179,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -312,7 +323,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -343,9 +356,11 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def create_glossary(self) -> Callable[ - [business_glossary.CreateGlossaryRequest], - Awaitable[operations_pb2.Operation]]: + def create_glossary( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create glossary method over gRPC. Creates a new Glossary resource. @@ -360,18 +375,20 @@ def create_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
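For orientation, a hedged sketch of constructing this transport directly; most callers get one implicitly from the client, and per the `__init__` signature above, `channel` may be either a ready `aio.Channel` or a channel factory:

    from google.cloud.dataplex_v1.services.business_glossary_service.transports.grpc_asyncio import (
        BusinessGlossaryServiceGrpcAsyncIOTransport,
    )

    # Assumes Application Default Credentials are resolvable in the environment.
    transport = BusinessGlossaryServiceGrpcAsyncIOTransport(
        host="dataplex.googleapis.com",
        # Passing the classmethod as a factory defers channel creation until
        # the transport has resolved credentials and endpoint.
        channel=BusinessGlossaryServiceGrpcAsyncIOTransport.create_channel,
    )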
- if 'create_glossary' not in self._stubs: - self._stubs['create_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary', + if "create_glossary" not in self._stubs: + self._stubs["create_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary", request_serializer=business_glossary.CreateGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_glossary'] + return self._stubs["create_glossary"] @property - def update_glossary(self) -> Callable[ - [business_glossary.UpdateGlossaryRequest], - Awaitable[operations_pb2.Operation]]: + def update_glossary( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update glossary method over gRPC. Updates a Glossary resource. @@ -386,18 +403,20 @@ def update_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_glossary' not in self._stubs: - self._stubs['update_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary', + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary", request_serializer=business_glossary.UpdateGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_glossary'] + return self._stubs["update_glossary"] @property - def delete_glossary(self) -> Callable[ - [business_glossary.DeleteGlossaryRequest], - Awaitable[operations_pb2.Operation]]: + def delete_glossary( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete glossary method over gRPC. Deletes a Glossary resource. All the categories and @@ -414,18 +433,20 @@ def delete_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary' not in self._stubs: - self._stubs['delete_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary', + if "delete_glossary" not in self._stubs: + self._stubs["delete_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary", request_serializer=business_glossary.DeleteGlossaryRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_glossary'] + return self._stubs["delete_glossary"] @property - def get_glossary(self) -> Callable[ - [business_glossary.GetGlossaryRequest], - Awaitable[business_glossary.Glossary]]: + def get_glossary( + self, + ) -> Callable[ + [business_glossary.GetGlossaryRequest], Awaitable[business_glossary.Glossary] + ]: r"""Return a callable for the get glossary method over gRPC. Gets a Glossary resource. @@ -440,18 +461,21 @@ def get_glossary(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
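Because create/update/delete glossary are long-running operations, the raw callables above yield an `operations_pb2.Operation` rather than the final resource. A hedged sketch of resolving one through the higher-level async client; the client class name follows the standard `<Service>AsyncClient` convention and the request field names are inferred from the request messages in this change, neither is shown verbatim in this diff:

    from google.cloud import dataplex_v1

    async def create_demo_glossary(parent: str) -> "dataplex_v1.Glossary":
        client = dataplex_v1.BusinessGlossaryServiceAsyncClient()
        operation = await client.create_glossary(
            parent=parent,  # e.g. "projects/my-project/locations/us-central1"
            glossary=dataplex_v1.Glossary(display_name="Demo glossary"),
            glossary_id="demo-glossary",
        )
        return await operation.result()  # wait for the LRO to complete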
- if 'get_glossary' not in self._stubs: - self._stubs['get_glossary'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary', + if "get_glossary" not in self._stubs: + self._stubs["get_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary", request_serializer=business_glossary.GetGlossaryRequest.serialize, response_deserializer=business_glossary.Glossary.deserialize, ) - return self._stubs['get_glossary'] + return self._stubs["get_glossary"] @property - def list_glossaries(self) -> Callable[ - [business_glossary.ListGlossariesRequest], - Awaitable[business_glossary.ListGlossariesResponse]]: + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + Awaitable[business_glossary.ListGlossariesResponse], + ]: r"""Return a callable for the list glossaries method over gRPC. Lists Glossary resources in a project and location. @@ -466,18 +490,21 @@ def list_glossaries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_glossaries' not in self._stubs: - self._stubs['list_glossaries'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries', + if "list_glossaries" not in self._stubs: + self._stubs["list_glossaries"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries", request_serializer=business_glossary.ListGlossariesRequest.serialize, response_deserializer=business_glossary.ListGlossariesResponse.deserialize, ) - return self._stubs['list_glossaries'] + return self._stubs["list_glossaries"] @property - def create_glossary_category(self) -> Callable[ - [business_glossary.CreateGlossaryCategoryRequest], - Awaitable[business_glossary.GlossaryCategory]]: + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: r"""Return a callable for the create glossary category method over gRPC. Creates a new GlossaryCategory resource. @@ -492,18 +519,21 @@ def create_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_glossary_category' not in self._stubs: - self._stubs['create_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory', + if "create_glossary_category" not in self._stubs: + self._stubs["create_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory", request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['create_glossary_category'] + return self._stubs["create_glossary_category"] @property - def update_glossary_category(self) -> Callable[ - [business_glossary.UpdateGlossaryCategoryRequest], - Awaitable[business_glossary.GlossaryCategory]]: + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: r"""Return a callable for the update glossary category method over gRPC. Updates a GlossaryCategory resource. 
@@ -518,18 +548,20 @@ def update_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_glossary_category' not in self._stubs: - self._stubs['update_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory', + if "update_glossary_category" not in self._stubs: + self._stubs["update_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory", request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['update_glossary_category'] + return self._stubs["update_glossary_category"] @property - def delete_glossary_category(self) -> Callable[ - [business_glossary.DeleteGlossaryCategoryRequest], - Awaitable[empty_pb2.Empty]]: + def delete_glossary_category( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete glossary category method over gRPC. Deletes a GlossaryCategory resource. All the @@ -547,18 +579,21 @@ def delete_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary_category' not in self._stubs: - self._stubs['delete_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory', + if "delete_glossary_category" not in self._stubs: + self._stubs["delete_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory", request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_glossary_category'] + return self._stubs["delete_glossary_category"] @property - def get_glossary_category(self) -> Callable[ - [business_glossary.GetGlossaryCategoryRequest], - Awaitable[business_glossary.GlossaryCategory]]: + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: r"""Return a callable for the get glossary category method over gRPC. Gets a GlossaryCategory resource. @@ -573,18 +608,21 @@ def get_glossary_category(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_glossary_category' not in self._stubs: - self._stubs['get_glossary_category'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory', + if "get_glossary_category" not in self._stubs: + self._stubs["get_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory", request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, response_deserializer=business_glossary.GlossaryCategory.deserialize, ) - return self._stubs['get_glossary_category'] + return self._stubs["get_glossary_category"] @property - def list_glossary_categories(self) -> Callable[ - [business_glossary.ListGlossaryCategoriesRequest], - Awaitable[business_glossary.ListGlossaryCategoriesResponse]]: + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Awaitable[business_glossary.ListGlossaryCategoriesResponse], + ]: r"""Return a callable for the list glossary categories method over gRPC. Lists GlossaryCategory resources in a Glossary. @@ -599,18 +637,21 @@ def list_glossary_categories(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_glossary_categories' not in self._stubs: - self._stubs['list_glossary_categories'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories', + if "list_glossary_categories" not in self._stubs: + self._stubs["list_glossary_categories"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories", request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, ) - return self._stubs['list_glossary_categories'] + return self._stubs["list_glossary_categories"] @property - def create_glossary_term(self) -> Callable[ - [business_glossary.CreateGlossaryTermRequest], - Awaitable[business_glossary.GlossaryTerm]]: + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: r"""Return a callable for the create glossary term method over gRPC. Creates a new GlossaryTerm resource. @@ -625,18 +666,21 @@ def create_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_glossary_term' not in self._stubs: - self._stubs['create_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm', + if "create_glossary_term" not in self._stubs: + self._stubs["create_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm", request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['create_glossary_term'] + return self._stubs["create_glossary_term"] @property - def update_glossary_term(self) -> Callable[ - [business_glossary.UpdateGlossaryTermRequest], - Awaitable[business_glossary.GlossaryTerm]]: + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: r"""Return a callable for the update glossary term method over gRPC. Updates a GlossaryTerm resource. @@ -651,18 +695,20 @@ def update_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_glossary_term' not in self._stubs: - self._stubs['update_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm', + if "update_glossary_term" not in self._stubs: + self._stubs["update_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm", request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['update_glossary_term'] + return self._stubs["update_glossary_term"] @property - def delete_glossary_term(self) -> Callable[ - [business_glossary.DeleteGlossaryTermRequest], - Awaitable[empty_pb2.Empty]]: + def delete_glossary_term( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete glossary term method over gRPC. Deletes a GlossaryTerm resource. @@ -677,18 +723,21 @@ def delete_glossary_term(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_glossary_term' not in self._stubs: - self._stubs['delete_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm', + if "delete_glossary_term" not in self._stubs: + self._stubs["delete_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm", request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_glossary_term'] + return self._stubs["delete_glossary_term"] @property - def get_glossary_term(self) -> Callable[ - [business_glossary.GetGlossaryTermRequest], - Awaitable[business_glossary.GlossaryTerm]]: + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: r"""Return a callable for the get glossary term method over gRPC. Gets a GlossaryTerm resource. @@ -703,18 +752,21 @@ def get_glossary_term(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_glossary_term' not in self._stubs: - self._stubs['get_glossary_term'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm', + if "get_glossary_term" not in self._stubs: + self._stubs["get_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm", request_serializer=business_glossary.GetGlossaryTermRequest.serialize, response_deserializer=business_glossary.GlossaryTerm.deserialize, ) - return self._stubs['get_glossary_term'] + return self._stubs["get_glossary_term"] @property - def list_glossary_terms(self) -> Callable[ - [business_glossary.ListGlossaryTermsRequest], - Awaitable[business_glossary.ListGlossaryTermsResponse]]: + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Awaitable[business_glossary.ListGlossaryTermsResponse], + ]: r"""Return a callable for the list glossary terms method over gRPC. Lists GlossaryTerm resources in a Glossary. @@ -729,16 +781,16 @@ def list_glossary_terms(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_glossary_terms' not in self._stubs: - self._stubs['list_glossary_terms'] = self._logged_channel.unary_unary( - '/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms', + if "list_glossary_terms" not in self._stubs: + self._stubs["list_glossary_terms"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms", request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, ) - return self._stubs['list_glossary_terms'] + return self._stubs["list_glossary_terms"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.create_glossary: self._wrap_method( self.create_glossary, @@ -863,8 +915,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -881,8 +932,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -899,8 +949,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
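`_prep_wrapped_messages` decorates each raw callable with defaults before exposing it. A hedged sketch of what that wrapping amounts to, using the public `google.api_core` helper the transport relies on, given a `transport` instance as above (the retry and timeout values are illustrative, not the generated defaults):

    from google.api_core import gapic_v1
    from google.api_core import retry_async as retries

    wrapped_get_term = gapic_v1.method_async.wrap_method(
        transport.get_glossary_term,
        default_retry=retries.AsyncRetry(initial=1.0, maximum=10.0, multiplier=1.3),
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )

The `_wrap_with_kind` check earlier simply probes whether the installed `api_core` version accepts the newer `kind` keyword on `wrap_method`.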
# gRPC handles serialization and deserialization, so we just need @@ -916,9 +965,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -934,9 +984,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -953,8 +1004,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the get location method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -968,6 +1018,4 @@ def get_location( return self._stubs["get_location"] -__all__ = ( - 'BusinessGlossaryServiceGrpcAsyncIOTransport', -) +__all__ = ("BusinessGlossaryServiceGrpcAsyncIOTransport",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py similarity index 66% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py index babf4e3a4beb..6ef1c9c485a7 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py @@ -13,37 +13,30 @@ # See the License for the specific language governing permissions and # limitations under the License.
# -import logging +import dataclasses import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.dataplex_v1.types import business_glossary -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseBusinessGlossaryServiceRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBusinessGlossaryServiceRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -52,6 +45,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -200,7 +194,14 @@ def post_update_glossary_term(self, response): """ - def pre_create_glossary(self, request: business_glossary.CreateGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_glossary( + self, + request: business_glossary.CreateGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_glossary Override in a subclass to manipulate the request or metadata @@ -208,7 +209,9 @@ def pre_create_glossary(self, request: business_glossary.CreateGlossaryRequest, """ return request, metadata - def post_create_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_glossary DEPRECATED. 
Please use the `post_create_glossary_with_metadata` @@ -221,7 +224,11 @@ def post_create_glossary(self, response: operations_pb2.Operation) -> operations """ return response - def post_create_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_glossary Override in a subclass to read or manipulate the response or metadata after it @@ -236,7 +243,14 @@ def post_create_glossary_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_create_glossary_category(self, request: business_glossary.CreateGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_glossary_category( + self, + request: business_glossary.CreateGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for create_glossary_category Override in a subclass to manipulate the request or metadata @@ -244,7 +258,9 @@ def pre_create_glossary_category(self, request: business_glossary.CreateGlossary """ return request, metadata - def post_create_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + def post_create_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: """Post-rpc interceptor for create_glossary_category DEPRECATED. 
Please use the `post_create_glossary_category_with_metadata` @@ -257,7 +273,13 @@ def post_create_glossary_category(self, response: business_glossary.GlossaryCate """ return response - def post_create_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for create_glossary_category Override in a subclass to read or manipulate the response or metadata after it @@ -272,7 +294,14 @@ def post_create_glossary_category_with_metadata(self, response: business_glossar """ return response, metadata - def pre_create_glossary_term(self, request: business_glossary.CreateGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.CreateGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_glossary_term( + self, + request: business_glossary.CreateGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for create_glossary_term Override in a subclass to manipulate the request or metadata @@ -280,7 +309,9 @@ def pre_create_glossary_term(self, request: business_glossary.CreateGlossaryTerm """ return request, metadata - def post_create_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + def post_create_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: """Post-rpc interceptor for create_glossary_term DEPRECATED. 
Please use the `post_create_glossary_term_with_metadata` @@ -293,7 +324,11 @@ def post_create_glossary_term(self, response: business_glossary.GlossaryTerm) -> """ return response - def post_create_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_glossary_term Override in a subclass to read or manipulate the response or metadata after it @@ -308,7 +343,13 @@ def post_create_glossary_term_with_metadata(self, response: business_glossary.Gl """ return response, metadata - def pre_delete_glossary(self, request: business_glossary.DeleteGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_glossary( + self, + request: business_glossary.DeleteGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_glossary Override in a subclass to manipulate the request or metadata @@ -316,7 +357,9 @@ def pre_delete_glossary(self, request: business_glossary.DeleteGlossaryRequest, """ return request, metadata - def post_delete_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_glossary DEPRECATED. 
Please use the `post_delete_glossary_with_metadata` @@ -329,7 +372,11 @@ def post_delete_glossary(self, response: operations_pb2.Operation) -> operations """ return response - def post_delete_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_glossary Override in a subclass to read or manipulate the response or metadata after it @@ -344,7 +391,14 @@ def post_delete_glossary_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_glossary_category(self, request: business_glossary.DeleteGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_glossary_category( + self, + request: business_glossary.DeleteGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for delete_glossary_category Override in a subclass to manipulate the request or metadata @@ -352,7 +406,14 @@ def pre_delete_glossary_category(self, request: business_glossary.DeleteGlossary """ return request, metadata - def pre_delete_glossary_term(self, request: business_glossary.DeleteGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.DeleteGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_glossary_term( + self, + request: business_glossary.DeleteGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for delete_glossary_term Override in a subclass to manipulate the request or metadata @@ -360,7 +421,13 @@ def pre_delete_glossary_term(self, request: business_glossary.DeleteGlossaryTerm """ return request, metadata - def pre_get_glossary(self, request: business_glossary.GetGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_glossary( + self, + request: business_glossary.GetGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_glossary Override in a subclass to manipulate the request or metadata @@ -368,7 +435,9 @@ def pre_get_glossary(self, request: business_glossary.GetGlossaryRequest, metada """ return request, metadata - def post_get_glossary(self, response: business_glossary.Glossary) -> business_glossary.Glossary: + def post_get_glossary( + self, response: business_glossary.Glossary + ) -> business_glossary.Glossary: """Post-rpc interceptor for get_glossary DEPRECATED. 
Please use the `post_get_glossary_with_metadata` @@ -381,7 +450,11 @@ def post_get_glossary(self, response: business_glossary.Glossary) -> business_gl """ return response - def post_get_glossary_with_metadata(self, response: business_glossary.Glossary, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.Glossary, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_glossary_with_metadata( + self, + response: business_glossary.Glossary, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.Glossary, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_glossary Override in a subclass to read or manipulate the response or metadata after it @@ -396,7 +469,14 @@ def post_get_glossary_with_metadata(self, response: business_glossary.Glossary, """ return response, metadata - def pre_get_glossary_category(self, request: business_glossary.GetGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_glossary_category( + self, + request: business_glossary.GetGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for get_glossary_category Override in a subclass to manipulate the request or metadata @@ -404,7 +484,9 @@ def pre_get_glossary_category(self, request: business_glossary.GetGlossaryCatego """ return request, metadata - def post_get_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + def post_get_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: """Post-rpc interceptor for get_glossary_category DEPRECATED. 
Please use the `post_get_glossary_category_with_metadata` @@ -417,7 +499,13 @@ def post_get_glossary_category(self, response: business_glossary.GlossaryCategor """ return response - def post_get_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for get_glossary_category Override in a subclass to read or manipulate the response or metadata after it @@ -432,7 +520,14 @@ def post_get_glossary_category_with_metadata(self, response: business_glossary.G """ return response, metadata - def pre_get_glossary_term(self, request: business_glossary.GetGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GetGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_glossary_term( + self, + request: business_glossary.GetGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for get_glossary_term Override in a subclass to manipulate the request or metadata @@ -440,7 +535,9 @@ def pre_get_glossary_term(self, request: business_glossary.GetGlossaryTermReques """ return request, metadata - def post_get_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + def post_get_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: """Post-rpc interceptor for get_glossary_term DEPRECATED. 
Please use the `post_get_glossary_term_with_metadata` @@ -453,7 +550,11 @@ def post_get_glossary_term(self, response: business_glossary.GlossaryTerm) -> bu """ return response - def post_get_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_glossary_term Override in a subclass to read or manipulate the response or metadata after it @@ -468,7 +569,13 @@ def post_get_glossary_term_with_metadata(self, response: business_glossary.Gloss """ return response, metadata - def pre_list_glossaries(self, request: business_glossary.ListGlossariesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossariesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_glossaries( + self, + request: business_glossary.ListGlossariesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossariesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_glossaries Override in a subclass to manipulate the request or metadata @@ -476,7 +583,9 @@ def pre_list_glossaries(self, request: business_glossary.ListGlossariesRequest, """ return request, metadata - def post_list_glossaries(self, response: business_glossary.ListGlossariesResponse) -> business_glossary.ListGlossariesResponse: + def post_list_glossaries( + self, response: business_glossary.ListGlossariesResponse + ) -> business_glossary.ListGlossariesResponse: """Post-rpc interceptor for list_glossaries DEPRECATED. 
Please use the `post_list_glossaries_with_metadata` @@ -489,7 +598,14 @@ def post_list_glossaries(self, response: business_glossary.ListGlossariesRespons """ return response - def post_list_glossaries_with_metadata(self, response: business_glossary.ListGlossariesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossariesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_glossaries_with_metadata( + self, + response: business_glossary.ListGlossariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for list_glossaries Override in a subclass to read or manipulate the response or metadata after it @@ -504,7 +620,14 @@ def post_list_glossaries_with_metadata(self, response: business_glossary.ListGlo """ return response, metadata - def pre_list_glossary_categories(self, request: business_glossary.ListGlossaryCategoriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryCategoriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_glossary_categories( + self, + request: business_glossary.ListGlossaryCategoriesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryCategoriesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for list_glossary_categories Override in a subclass to manipulate the request or metadata @@ -512,7 +635,9 @@ def pre_list_glossary_categories(self, request: business_glossary.ListGlossaryCa """ return request, metadata - def post_list_glossary_categories(self, response: business_glossary.ListGlossaryCategoriesResponse) -> business_glossary.ListGlossaryCategoriesResponse: + def post_list_glossary_categories( + self, response: business_glossary.ListGlossaryCategoriesResponse + ) -> business_glossary.ListGlossaryCategoriesResponse: """Post-rpc interceptor for list_glossary_categories DEPRECATED. 
Please use the `post_list_glossary_categories_with_metadata` @@ -525,7 +650,14 @@ def post_list_glossary_categories(self, response: business_glossary.ListGlossary """ return response - def post_list_glossary_categories_with_metadata(self, response: business_glossary.ListGlossaryCategoriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryCategoriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_glossary_categories_with_metadata( + self, + response: business_glossary.ListGlossaryCategoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryCategoriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for list_glossary_categories Override in a subclass to read or manipulate the response or metadata after it @@ -540,7 +672,14 @@ def post_list_glossary_categories_with_metadata(self, response: business_glossar """ return response, metadata - def pre_list_glossary_terms(self, request: business_glossary.ListGlossaryTermsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryTermsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_glossary_terms( + self, + request: business_glossary.ListGlossaryTermsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryTermsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for list_glossary_terms Override in a subclass to manipulate the request or metadata @@ -548,7 +687,9 @@ def pre_list_glossary_terms(self, request: business_glossary.ListGlossaryTermsRe """ return request, metadata - def post_list_glossary_terms(self, response: business_glossary.ListGlossaryTermsResponse) -> business_glossary.ListGlossaryTermsResponse: + def post_list_glossary_terms( + self, response: business_glossary.ListGlossaryTermsResponse + ) -> business_glossary.ListGlossaryTermsResponse: """Post-rpc interceptor for list_glossary_terms DEPRECATED. 
Please use the `post_list_glossary_terms_with_metadata` @@ -561,7 +702,14 @@ def post_list_glossary_terms(self, response: business_glossary.ListGlossaryTerms """ return response - def post_list_glossary_terms_with_metadata(self, response: business_glossary.ListGlossaryTermsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.ListGlossaryTermsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_glossary_terms_with_metadata( + self, + response: business_glossary.ListGlossaryTermsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryTermsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for list_glossary_terms Override in a subclass to read or manipulate the response or metadata after it @@ -576,7 +724,13 @@ def post_list_glossary_terms_with_metadata(self, response: business_glossary.Lis """ return response, metadata - def pre_update_glossary(self, request: business_glossary.UpdateGlossaryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_glossary( + self, + request: business_glossary.UpdateGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_glossary Override in a subclass to manipulate the request or metadata @@ -584,7 +738,9 @@ def pre_update_glossary(self, request: business_glossary.UpdateGlossaryRequest, """ return request, metadata - def post_update_glossary(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_glossary DEPRECATED. 
Please use the `post_update_glossary_with_metadata` @@ -597,7 +753,11 @@ def post_update_glossary(self, response: operations_pb2.Operation) -> operations """ return response - def post_update_glossary_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_glossary Override in a subclass to read or manipulate the response or metadata after it @@ -612,7 +772,14 @@ def post_update_glossary_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_update_glossary_category(self, request: business_glossary.UpdateGlossaryCategoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryCategoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_glossary_category( + self, + request: business_glossary.UpdateGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for update_glossary_category Override in a subclass to manipulate the request or metadata @@ -620,7 +787,9 @@ def pre_update_glossary_category(self, request: business_glossary.UpdateGlossary """ return request, metadata - def post_update_glossary_category(self, response: business_glossary.GlossaryCategory) -> business_glossary.GlossaryCategory: + def post_update_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: """Post-rpc interceptor for update_glossary_category DEPRECATED. 
Please use the `post_update_glossary_category_with_metadata` @@ -633,7 +802,13 @@ def post_update_glossary_category(self, response: business_glossary.GlossaryCate """ return response - def post_update_glossary_category_with_metadata(self, response: business_glossary.GlossaryCategory, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for update_glossary_category Override in a subclass to read or manipulate the response or metadata after it @@ -648,7 +823,14 @@ def post_update_glossary_category_with_metadata(self, response: business_glossar """ return response, metadata - def pre_update_glossary_term(self, request: business_glossary.UpdateGlossaryTermRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.UpdateGlossaryTermRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_glossary_term( + self, + request: business_glossary.UpdateGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for update_glossary_term Override in a subclass to manipulate the request or metadata @@ -656,7 +838,9 @@ def pre_update_glossary_term(self, request: business_glossary.UpdateGlossaryTerm """ return request, metadata - def post_update_glossary_term(self, response: business_glossary.GlossaryTerm) -> business_glossary.GlossaryTerm: + def post_update_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: """Post-rpc interceptor for update_glossary_term DEPRECATED. 
Please use the `post_update_glossary_term_with_metadata` @@ -669,7 +853,11 @@ def post_update_glossary_term(self, response: business_glossary.GlossaryTerm) -> """ return response - def post_update_glossary_term_with_metadata(self, response: business_glossary.GlossaryTerm, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_glossary_term Override in a subclass to read or manipulate the response or metadata after it @@ -685,8 +873,12 @@ def post_update_glossary_term_with_metadata(self, response: business_glossary.Gl return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -706,8 +898,12 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -727,8 +923,12 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -736,9 +936,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -748,8 +946,12 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -757,9 +959,7 @@ def pre_delete_operation( 
""" return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -769,8 +969,12 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -790,8 +994,12 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -836,20 +1044,21 @@ class BusinessGlossaryServiceRestTransport(_BaseBusinessGlossaryServiceRestTrans It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BusinessGlossaryServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BusinessGlossaryServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -892,10 +1101,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -912,64 +1122,70 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", }, { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. 
return self._operations_client - class _CreateGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary, BusinessGlossaryServiceRestStub): + class _CreateGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.CreateGlossary") @@ -981,27 +1197,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.CreateGlossaryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: business_glossary.CreateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create glossary method over HTTP. Args: @@ -1023,32 +1241,44 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_http_options() + ) request, metadata = self._interceptor.pre_create_glossary(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossary", - extra = { + extra={ 
"serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossary", "httpRequest": http_request, @@ -1057,7 +1287,17 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._CreateGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + BusinessGlossaryServiceRestTransport._CreateGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1070,20 +1310,24 @@ def __call__(self, resp = self._interceptor.post_create_glossary(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_glossary_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossary", "metadata": http_response["headers"], @@ -1092,7 +1336,10 @@ def __call__(self, ) return resp - class _CreateGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory, BusinessGlossaryServiceRestStub): + class _CreateGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryCategory") @@ -1104,27 +1351,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.CreateGlossaryCategoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryCategory: + def __call__( + self, + request: business_glossary.CreateGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Call the create glossary category method over HTTP. 
Args: @@ -1148,32 +1397,46 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_http_options() + ) - request, metadata = self._interceptor.pre_create_glossary_category(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_create_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryCategory", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossaryCategory", "httpRequest": http_request, @@ -1182,7 +1445,15 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._CreateGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = BusinessGlossaryServiceRestTransport._CreateGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
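The hunks above route every RPC through paired pre_*/post_* interceptor hooks, with the newer *_with_metadata variants superseding the deprecated post_* forms. A minimal sketch of a custom interceptor, using only hook names that appear in this patch; the logging calls are illustrative, not part of the generated code:

import logging

class AuditingInterceptor(BusinessGlossaryServiceRestInterceptor):
    """Hypothetical interceptor that logs outbound requests and response headers."""

    def pre_create_glossary(self, request, metadata):
        # Runs before the HTTP request is built; return the (possibly
        # modified) request and metadata for the transport to use.
        logging.info("CreateGlossary request: %s", request)
        return request, metadata

    def post_create_glossary_with_metadata(self, response, metadata):
        # Here `metadata` is the list of (key, value) response-header tuples
        # built by the stub after a successful call.
        logging.info("CreateGlossary returned %d response headers", len(metadata))
        return response, metadata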
@@ -1197,20 +1468,26 @@ def __call__(self, resp = self._interceptor.post_create_glossary_category(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_glossary_category_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.GlossaryCategory.to_json(response) + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossaryCategory", "metadata": http_response["headers"], @@ -1219,7 +1496,10 @@ def __call__(self, ) return resp - class _CreateGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm, BusinessGlossaryServiceRestStub): + class _CreateGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryTerm") @@ -1231,27 +1511,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.CreateGlossaryTermRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryTerm: + def __call__( + self, + request: business_glossary.CreateGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Call the create glossary term method over HTTP. 
Args: @@ -1276,32 +1558,46 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_http_options() + ) - request, metadata = self._interceptor.pre_create_glossary_term(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_create_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryTerm", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossaryTerm", "httpRequest": http_request, @@ -1310,7 +1606,17 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._CreateGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + BusinessGlossaryServiceRestTransport._CreateGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
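A custom interceptor is wired in through the transport constructor reformatted earlier in this patch. The client class and import path below are assumptions inferred from the fully qualified RPC names in the logging statements (google.cloud.dataplex_v1.BusinessGlossaryServiceClient):

from google.cloud import dataplex_v1

# AuditingInterceptor is the hypothetical subclass sketched above.
transport = BusinessGlossaryServiceRestTransport(
    host="dataplex.googleapis.com",
    interceptor=AuditingInterceptor(),
)
client = dataplex_v1.BusinessGlossaryServiceClient(transport=transport)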
@@ -1325,20 +1631,24 @@ def __call__(self, resp = self._interceptor.post_create_glossary_term(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_glossary_term_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = business_glossary.GlossaryTerm.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CreateGlossaryTerm", "metadata": http_response["headers"], @@ -1347,7 +1657,10 @@ def __call__(self, ) return resp - class _DeleteGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary, BusinessGlossaryServiceRestStub): + class _DeleteGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.DeleteGlossary") @@ -1359,26 +1672,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.DeleteGlossaryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: business_glossary.DeleteGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete glossary method over HTTP. 
Args: @@ -1400,30 +1715,40 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_http_options() + ) request, metadata = self._interceptor.pre_delete_glossary(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "DeleteGlossary", "httpRequest": http_request, @@ -1432,7 +1757,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._DeleteGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._DeleteGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
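Like CreateGlossary, DeleteGlossary surfaces a raw google.longrunning Operation at this layer, and the cached operations_client shown earlier maps the standard LRO verbs onto the project- and organization-scoped /v1 paths. A naive polling sketch against that client; the operation name is a placeholder, and the public client normally hides this behind operation-future helpers:

import time

def wait_for_operation(transport, name, poll_seconds=2.0):
    # Re-fetch the Operation until the server marks it done.
    op = transport.operations_client.get_operation(name)
    while not op.done:
        time.sleep(poll_seconds)
        op = transport.operations_client.get_operation(name)
    return op

# wait_for_operation(transport, "projects/my-proj/locations/us-central1/operations/op-123")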
@@ -1445,20 +1779,24 @@ def __call__(self, resp = self._interceptor.post_delete_glossary(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_glossary_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "DeleteGlossary", "metadata": http_response["headers"], @@ -1467,7 +1805,10 @@ def __call__(self, ) return resp - class _DeleteGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory, BusinessGlossaryServiceRestStub): + class _DeleteGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryCategory") @@ -1479,26 +1820,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.DeleteGlossaryCategoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: business_glossary.DeleteGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete glossary category method over HTTP. Args: @@ -1513,30 +1856,42 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_glossary_category(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryCategory", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "DeleteGlossaryCategory", "httpRequest": http_request, @@ -1545,14 +1900,24 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._DeleteGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = BusinessGlossaryServiceRestTransport._DeleteGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm, BusinessGlossaryServiceRestStub): + class _DeleteGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryTerm") @@ -1564,26 +1929,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.DeleteGlossaryTermRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: business_glossary.DeleteGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete glossary term method over HTTP. Args: @@ -1598,30 +1965,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_glossary_term(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryTerm", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", 
"rpcName": "DeleteGlossaryTerm", "httpRequest": http_request, @@ -1630,14 +2009,26 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._DeleteGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._DeleteGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary, BusinessGlossaryServiceRestStub): + class _GetGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.GetGlossary") @@ -1649,26 +2040,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.GetGlossaryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.Glossary: + def __call__( + self, + request: business_glossary.GetGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: r"""Call the get glossary method over HTTP. 
Args: @@ -1694,30 +2087,40 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_http_options() + ) request, metadata = self._interceptor.pre_get_glossary(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossary", "httpRequest": http_request, @@ -1726,7 +2129,14 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._GetGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = BusinessGlossaryServiceRestTransport._GetGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
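Every stub in this file converts an HTTP status of 400 or above into a typed exception via core_exceptions.from_http_response, so callers catch google.api_core exception subclasses rather than inspecting status codes. A sketch, assuming the usual flattened `name` argument on the public client (`client` is the instance constructed above; the resource name is a placeholder):

from google.api_core import exceptions as core_exceptions

try:
    glossary = client.get_glossary(
        name="projects/my-proj/locations/us-central1/glossaries/my-glossary"
    )
except core_exceptions.NotFound:
    glossary = None  # e.g. fall through to a create path
except core_exceptions.GoogleAPICallError as exc:
    # Any other 4xx/5xx arrives as a GoogleAPICallError subclass.
    raise RuntimeError(f"GetGlossary failed with code {exc.code}") from exc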
@@ -1741,20 +2151,24 @@ def __call__(self, resp = self._interceptor.post_get_glossary(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_glossary_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = business_glossary.Glossary.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossary", "metadata": http_response["headers"], @@ -1763,7 +2177,10 @@ def __call__(self, ) return resp - class _GetGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory, BusinessGlossaryServiceRestStub): + class _GetGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.GetGlossaryCategory") @@ -1775,26 +2192,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.GetGlossaryCategoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryCategory: + def __call__( + self, + request: business_glossary.GetGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Call the get glossary category method over HTTP. 
Args: @@ -1817,30 +2236,42 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_http_options() + ) - request, metadata = self._interceptor.pre_get_glossary_category(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_get_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryCategory", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossaryCategory", "httpRequest": http_request, @@ -1849,7 +2280,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._GetGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._GetGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
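The `metadata` sequence accepted by every __call__ above is folded directly into the HTTP headers (`headers = dict(metadata)`), so per-call key/value pairs travel as request headers. A sketch attaching one such header to a single call; the header name and resource name are placeholders, not conventions defined by this patch:

category = client.get_glossary_category(
    name="projects/my-proj/locations/us-central1/glossaries/g/categories/c",
    metadata=(("x-example-trace-id", "debug-session-42"),),
)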
@@ -1864,20 +2304,26 @@ def __call__(self, resp = self._interceptor.post_get_glossary_category(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_glossary_category_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.GlossaryCategory.to_json(response) + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossaryCategory", "metadata": http_response["headers"], @@ -1886,7 +2332,10 @@ def __call__(self, ) return resp - class _GetGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm, BusinessGlossaryServiceRestStub): + class _GetGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.GetGlossaryTerm") @@ -1898,26 +2347,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.GetGlossaryTermRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryTerm: + def __call__( + self, + request: business_glossary.GetGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Call the get glossary term method over HTTP. 
Args: @@ -1941,30 +2392,42 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_http_options() + ) - request, metadata = self._interceptor.pre_get_glossary_term(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_get_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryTerm", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossaryTerm", "httpRequest": http_request, @@ -1973,7 +2436,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._GetGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._GetGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
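The request/response dumps above are gated on CLIENT_LOGGING_SUPPORTED and on the module logger being enabled for DEBUG, so they cost nothing in normal operation. A sketch that switches them on for troubleshooting; the logger name is an assumption derived from the package path:

import logging

logging.basicConfig(level=logging.DEBUG)
# Scope DEBUG output to the Dataplex client rather than the whole process.
logging.getLogger("google.cloud.dataplex_v1").setLevel(logging.DEBUG)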
@@ -1988,20 +2460,24 @@ def __call__(self, resp = self._interceptor.post_get_glossary_term(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_glossary_term_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = business_glossary.GlossaryTerm.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetGlossaryTerm", "metadata": http_response["headers"], @@ -2010,7 +2486,10 @@ def __call__(self, ) return resp - class _ListGlossaries(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries, BusinessGlossaryServiceRestStub): + class _ListGlossaries( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.ListGlossaries") @@ -2022,26 +2501,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.ListGlossariesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.ListGlossariesResponse: + def __call__( + self, + request: business_glossary.ListGlossariesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossariesResponse: r"""Call the list glossaries method over HTTP. 
Args: @@ -2060,30 +2541,40 @@ def __call__(self, List Glossaries Response """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_http_options() + ) request, metadata = self._interceptor.pre_list_glossaries(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaries", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaries", "httpRequest": http_request, @@ -2092,7 +2583,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._ListGlossaries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._ListGlossaries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
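ListGlossaries is a paginated RPC: each invocation of the stub above performs one HTTP call and returns a single ListGlossariesResponse. A sketch of the caller-side pager, assuming the standard GAPIC pagination surface (a `page_size` request field and a pager that follows `next_page_token` transparently):

    from google.cloud import dataplex_v1

    client = dataplex_v1.BusinessGlossaryServiceClient(transport="rest")
    request = dataplex_v1.ListGlossariesRequest(
        parent="projects/my-project/locations/us-central1",
        page_size=50,  # the server may return fewer items per page
    )
    # The pager re-invokes _ListGlossaries.__call__ once per page.
    for glossary in client.list_glossaries(request=request):
        print(glossary.name)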
@@ -2107,20 +2607,26 @@ def __call__(self, resp = self._interceptor.post_list_glossaries(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_glossaries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_glossaries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.ListGlossariesResponse.to_json(response) + response_payload = business_glossary.ListGlossariesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaries", "metadata": http_response["headers"], @@ -2129,7 +2635,10 @@ def __call__(self, ) return resp - class _ListGlossaryCategories(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories, BusinessGlossaryServiceRestStub): + class _ListGlossaryCategories( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.ListGlossaryCategories") @@ -2141,26 +2650,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.ListGlossaryCategoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.ListGlossaryCategoriesResponse: + def __call__( + self, + request: business_glossary.ListGlossaryCategoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossaryCategoriesResponse: r"""Call the list glossary categories method over HTTP. 
Args: @@ -2179,30 +2690,42 @@ def __call__(self, List GlossaryCategories Response """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_http_options() + ) - request, metadata = self._interceptor.pre_list_glossary_categories(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_glossary_categories( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryCategories", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaryCategories", "httpRequest": http_request, @@ -2211,7 +2734,14 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._ListGlossaryCategories._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = BusinessGlossaryServiceRestTransport._ListGlossaryCategories._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
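Every stub in this transport brackets the HTTP call with paired interceptor hooks (`pre_*`, `post_*`, and `post_*_with_metadata`). A sketch of wiring a custom interceptor, assuming the import path and the `interceptor=` constructor argument mirror other generated REST transports; the `categories` response field name is also an assumption:

    from google.cloud import dataplex_v1
    from google.cloud.dataplex_v1.services.business_glossary_service.transports.rest import (
        BusinessGlossaryServiceRestInterceptor,
        BusinessGlossaryServiceRestTransport,
    )

    class AuditInterceptor(BusinessGlossaryServiceRestInterceptor):
        def pre_list_glossary_categories(self, request, metadata):
            # Runs before transcoding; may rewrite the request or metadata.
            print("listing categories under", request.parent)
            return request, metadata

        def post_list_glossary_categories(self, response):
            # Runs after deserialization, before the caller sees the response.
            print("received", len(response.categories), "categories")
            return response

    transport = BusinessGlossaryServiceRestTransport(interceptor=AuditInterceptor())
    client = dataplex_v1.BusinessGlossaryServiceClient(transport=transport)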
@@ -2226,20 +2756,28 @@ def __call__(self, resp = self._interceptor.post_list_glossary_categories(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_glossary_categories_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_glossary_categories_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.ListGlossaryCategoriesResponse.to_json(response) + response_payload = ( + business_glossary.ListGlossaryCategoriesResponse.to_json( + response + ) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaryCategories", "metadata": http_response["headers"], @@ -2248,7 +2786,10 @@ def __call__(self, ) return resp - class _ListGlossaryTerms(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms, BusinessGlossaryServiceRestStub): + class _ListGlossaryTerms( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.ListGlossaryTerms") @@ -2260,26 +2801,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: business_glossary.ListGlossaryTermsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.ListGlossaryTermsResponse: + def __call__( + self, + request: business_glossary.ListGlossaryTermsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossaryTermsResponse: r"""Call the list glossary terms method over HTTP. 
Args: @@ -2298,30 +2841,42 @@ def __call__(self, List GlossaryTerms Response """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_http_options() + ) - request, metadata = self._interceptor.pre_list_glossary_terms(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_glossary_terms( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryTerms", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaryTerms", "httpRequest": http_request, @@ -2330,7 +2885,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._ListGlossaryTerms._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._ListGlossaryTerms._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2345,20 +2909,26 @@ def __call__(self, resp = self._interceptor.post_list_glossary_terms(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_glossary_terms_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_glossary_terms_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.ListGlossaryTermsResponse.to_json(response) + response_payload = ( + business_glossary.ListGlossaryTermsResponse.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListGlossaryTerms", "metadata": http_response["headers"], @@ -2367,7 +2937,10 @@ def __call__(self, ) return resp - class _UpdateGlossary(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary, BusinessGlossaryServiceRestStub): + class _UpdateGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.UpdateGlossary") @@ -2379,27 +2952,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.UpdateGlossaryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: business_glossary.UpdateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update glossary method over HTTP. 
Args: @@ -2421,32 +2996,44 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_http_options() + ) request, metadata = self._interceptor.pre_update_glossary(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossary", "httpRequest": http_request, @@ -2455,7 +3042,17 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._UpdateGlossary._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + BusinessGlossaryServiceRestTransport._UpdateGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
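UpdateGlossary is one of the RPCs in this service that returns `operations_pb2.Operation` rather than the resource itself. At the client layer the raw operation is wrapped so callers can block on completion; the flattened `glossary`/`update_mask` arguments below follow the usual AIP-134 method signature and are an assumption, as is the resource name:

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.BusinessGlossaryServiceClient(transport="rest")
    glossary = dataplex_v1.Glossary(
        name="projects/my-project/locations/us-central1/glossaries/g1",
        display_name="Renamed glossary",
    )
    operation = client.update_glossary(
        glossary=glossary,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    # Polls the GetOperation mixin defined later in this file until done.
    print(operation.result().display_name)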
@@ -2468,20 +3065,24 @@ def __call__(self, resp = self._interceptor.post_update_glossary(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_glossary_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossary", "metadata": http_response["headers"], @@ -2490,7 +3091,10 @@ def __call__(self, ) return resp - class _UpdateGlossaryCategory(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory, BusinessGlossaryServiceRestStub): + class _UpdateGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryCategory") @@ -2502,27 +3106,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.UpdateGlossaryCategoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryCategory: + def __call__( + self, + request: business_glossary.UpdateGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: r"""Call the update glossary category method over HTTP. 
Args: @@ -2545,32 +3151,46 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_http_options() + ) - request, metadata = self._interceptor.pre_update_glossary_category(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryCategory", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossaryCategory", "httpRequest": http_request, @@ -2579,7 +3199,15 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._UpdateGlossaryCategory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = BusinessGlossaryServiceRestTransport._UpdateGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2594,20 +3222,26 @@ def __call__(self, resp = self._interceptor.post_update_glossary_category(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_glossary_category_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = business_glossary.GlossaryCategory.to_json(response) + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossaryCategory", "metadata": http_response["headers"], @@ -2616,7 +3250,10 @@ def __call__(self, ) return resp - class _UpdateGlossaryTerm(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm, BusinessGlossaryServiceRestStub): + class _UpdateGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryTerm") @@ -2628,27 +3265,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: business_glossary.UpdateGlossaryTermRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> business_glossary.GlossaryTerm: + def __call__( + self, + request: business_glossary.UpdateGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: r"""Call the update glossary term method over HTTP. 
Args: @@ -2672,32 +3311,46 @@ def __call__(self, """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_http_options() + ) - request, metadata = self._interceptor.pre_update_glossary_term(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryTerm", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossaryTerm", "httpRequest": http_request, @@ -2706,7 +3359,17 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._UpdateGlossaryTerm._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + BusinessGlossaryServiceRestTransport._UpdateGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
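The `_get_transcoded_request` / `_get_query_params_json` pair used throughout this file implements HTTP/JSON transcoding: path-template fields move into the URI, the designated body field (for the update RPCs) becomes the JSON payload, and leftover fields become query parameters. A runnable illustration with `google.api_core.path_template.transcode`, which is what the generated base classes delegate to; the HTTP rule shown is assumed from the AIP-131 convention rather than read out of the proto:

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/v1/{name=projects/*/locations/*/glossaries/*/terms/*}"}
    ]
    transcoded = path_template.transcode(
        http_options,
        name="projects/p/locations/l/glossaries/g1/terms/t1",
    )
    print(transcoded["method"])        # get
    print(transcoded["uri"])           # /v1/projects/p/locations/l/glossaries/g1/terms/t1
    print(transcoded["query_params"])  # {} (every field matched the template)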
@@ -2721,20 +3384,24 @@ def __call__(self, resp = self._interceptor.post_update_glossary_term(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_glossary_term_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = business_glossary.GlossaryTerm.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "UpdateGlossaryTerm", "metadata": http_response["headers"], @@ -2744,130 +3411,157 @@ def __call__(self, return resp @property - def create_glossary(self) -> Callable[ - [business_glossary.CreateGlossaryRequest], - operations_pb2.Operation]: + def create_glossary( + self, + ) -> Callable[[business_glossary.CreateGlossaryRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore @property - def create_glossary_category(self) -> Callable[ - [business_glossary.CreateGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + return self._CreateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore @property - def create_glossary_term(self) -> Callable[ - [business_glossary.CreateGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + return self._CreateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore @property - def delete_glossary(self) -> Callable[ - [business_glossary.DeleteGlossaryRequest], - operations_pb2.Operation]: + def delete_glossary( + self, + ) -> Callable[[business_glossary.DeleteGlossaryRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore @property - def delete_glossary_category(self) -> Callable[ - [business_glossary.DeleteGlossaryCategoryRequest], - empty_pb2.Empty]: + def delete_glossary_category( + self, + ) -> Callable[[business_glossary.DeleteGlossaryCategoryRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore @property - def delete_glossary_term(self) -> Callable[ - [business_glossary.DeleteGlossaryTermRequest], - empty_pb2.Empty]: + def delete_glossary_term( + self, + ) -> Callable[[business_glossary.DeleteGlossaryTermRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore @property - def get_glossary(self) -> Callable[ - [business_glossary.GetGlossaryRequest], - business_glossary.Glossary]: + def get_glossary( + self, + ) -> Callable[[business_glossary.GetGlossaryRequest], business_glossary.Glossary]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore + return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore @property - def get_glossary_category(self) -> Callable[ - [business_glossary.GetGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + return self._GetGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore @property - def get_glossary_term(self) -> Callable[ - [business_glossary.GetGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], business_glossary.GlossaryTerm + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + return self._GetGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore @property - def list_glossaries(self) -> Callable[ - [business_glossary.ListGlossariesRequest], - business_glossary.ListGlossariesResponse]: + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore @property - def list_glossary_categories(self) -> Callable[ - [business_glossary.ListGlossaryCategoriesRequest], - business_glossary.ListGlossaryCategoriesResponse]: + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListGlossaryCategories(self._session, self._host, self._interceptor) # type: ignore + return self._ListGlossaryCategories(self._session, self._host, self._interceptor) # type: ignore @property - def list_glossary_terms(self) -> Callable[ - [business_glossary.ListGlossaryTermsRequest], - business_glossary.ListGlossaryTermsResponse]: + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListGlossaryTerms(self._session, self._host, self._interceptor) # type: ignore + return self._ListGlossaryTerms(self._session, self._host, self._interceptor) # type: ignore @property - def update_glossary(self) -> Callable[ - [business_glossary.UpdateGlossaryRequest], - operations_pb2.Operation]: + def update_glossary( + self, + ) -> Callable[[business_glossary.UpdateGlossaryRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateGlossary(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGlossary(self._session, self._host, self._interceptor) # type: ignore @property - def update_glossary_category(self) -> Callable[ - [business_glossary.UpdateGlossaryCategoryRequest], - business_glossary.GlossaryCategory]: + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore @property - def update_glossary_term(self) -> Callable[ - [business_glossary.UpdateGlossaryTermRequest], - business_glossary.GlossaryTerm]: + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseBusinessGlossaryServiceRestTransport._BaseGetLocation, BusinessGlossaryServiceRestStub): + class _GetLocation( + _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.GetLocation") @@ -2879,27 +3573,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -2917,30 +3612,40 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetLocation", "httpRequest": http_request, @@ -2949,7 +3654,14 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = BusinessGlossaryServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2960,19 +3672,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetLocation", "httpResponse": http_response, @@ -2983,9 +3697,12 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseBusinessGlossaryServiceRestTransport._BaseListLocations, BusinessGlossaryServiceRestStub): + class _ListLocations( + _BaseBusinessGlossaryServiceRestTransport._BaseListLocations, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.ListLocations") @@ -2997,27 +3714,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -3035,30 +3753,40 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListLocations", "httpRequest": http_request, @@ -3067,7 +3795,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3078,19 +3815,21 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListLocations", "httpResponse": http_response, @@ -3101,9 +3840,12 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation, BusinessGlossaryServiceRestStub): + class _CancelOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.CancelOperation") @@ -3115,28 +3857,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -3151,32 +3894,46 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) - body = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + body = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -3185,7 +3942,17 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + BusinessGlossaryServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3196,9 +3963,12 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation, BusinessGlossaryServiceRestStub): + class _DeleteOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.DeleteOperation") @@ -3210,27 +3980,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -3245,30 +4016,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -3277,7 +4060,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3288,9 +4080,12 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseBusinessGlossaryServiceRestTransport._BaseGetOperation, BusinessGlossaryServiceRestStub): + class _GetOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.GetOperation") @@ -3302,27 +4097,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -3340,30 +4136,40 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetOperation", "httpRequest": http_request, @@ -3372,7 +4178,14 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = BusinessGlossaryServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3383,19 +4196,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "GetOperation", "httpResponse": http_response, @@ -3406,9 +4221,12 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseBusinessGlossaryServiceRestTransport._BaseListOperations, BusinessGlossaryServiceRestStub): + class _ListOperations( + _BaseBusinessGlossaryServiceRestTransport._BaseListOperations, + BusinessGlossaryServiceRestStub, + ): def __hash__(self): return hash("BusinessGlossaryServiceRestTransport.ListOperations") @@ -3420,27 +4238,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -3458,30 +4277,40 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListOperations", "httpRequest": http_request, @@ -3490,7 +4319,16 @@ def __call__(self, ) # Send the request - response = BusinessGlossaryServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + BusinessGlossaryServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3501,19 +4339,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", "rpcName": "ListOperations", "httpResponse": http_response, @@ -3530,6 +4370,4 @@ def close(self): self._session.close() -__all__=( - 'BusinessGlossaryServiceRestTransport', -) +__all__ = ("BusinessGlossaryServiceRestTransport",) diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py similarity index 53% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py index 8692eaba3d04..16c3ee1bdd58 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py @@ -14,22 +14,20 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import BusinessGlossaryServiceTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.dataplex_v1.types import business_glossary -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport class _BaseBusinessGlossaryServiceRestTransport(BusinessGlossaryServiceTransport): @@ -45,14 +43,16 @@ class _BaseBusinessGlossaryServiceRestTransport(BusinessGlossaryServiceTransport It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'dataplex.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: host (Optional[str]): @@ -76,7 +76,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -87,27 +89,33 @@ def __init__(self, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateGlossary: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "glossaryId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "glossaryId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/glossaries', - 'body': 'glossary', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/glossaries", + "body": "glossary", + }, ] return http_options @@ -122,17 +130,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -141,20 +155,26 @@ class _BaseCreateGlossaryCategory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "categoryId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "categoryId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/categories', - 'body': 'category', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/categories", + "body": "category", + }, ] return http_options @@ -169,17 +189,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( -
transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -188,20 +214,26 @@ class _BaseCreateGlossaryTerm: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "termId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "termId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/terms', - 'body': 'term', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/terms", + "body": "term", + }, ] return http_options @@ -216,17 +248,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -235,19 +273,23 @@ class _BaseDeleteGlossary: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*}", + }, ] return http_options @@ -259,11 +301,17 @@ def 
_get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -272,19 +320,23 @@ class _BaseDeleteGlossaryCategory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/categories/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/categories/*}", + }, ] return http_options @@ -296,11 +348,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -309,19 +367,23 @@ class _BaseDeleteGlossaryTerm: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/terms/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/terms/*}", + }, ] return http_options @@ -333,11 +395,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -346,19 +414,23 @@ class _BaseGetGlossary: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*}", + }, ] return http_options @@ -370,11 +442,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -383,19 +461,23 @@ class _BaseGetGlossaryCategory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/categories/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/categories/*}", + }, ] return http_options @@ -407,11 +489,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -420,19 +508,23 @@ class _BaseGetGlossaryTerm: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/glossaries/*/terms/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/terms/*}", + }, ] return http_options @@ -444,11 +536,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -457,19 +555,23 @@ class _BaseListGlossaries: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/glossaries', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/glossaries", + }, ] return http_options @@ -481,11 +583,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -494,19 +602,23 @@ class _BaseListGlossaryCategories: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: 
Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/categories', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/categories", + }, ] return http_options @@ -518,11 +630,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -531,19 +649,23 @@ class _BaseListGlossaryTerms: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/glossaries/*}/terms', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/terms", + }, ] return http_options @@ -555,11 +677,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -568,20 +696,26 @@ class _BaseUpdateGlossary: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{glossary.name=projects/*/locations/*/glossaries/*}', - 'body': 'glossary', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{glossary.name=projects/*/locations/*/glossaries/*}", + "body": "glossary", + }, ] return http_options @@ -596,17 +730,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -615,20 +755,26 @@ class _BaseUpdateGlossaryCategory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}', - 'body': 'category', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}", + "body": "category", + }, ] return http_options @@ -643,17 +789,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -662,20 +814,26 @@ class _BaseUpdateGlossaryTerm: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}', - 'body': 'term', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}", + "body": "term", + }, ] return http_options @@ -690,17 +848,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -711,23 +875,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -736,23 +900,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -761,33 +925,34 @@ def __hash__(self): # pragma: NO COVER 
@staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}:cancel', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -796,27 +961,27 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -825,27 +990,27 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return 
query_params class _BaseListOperations: @@ -854,30 +1019,28 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseBusinessGlossaryServiceRestTransport', -) +__all__ = ("_BaseBusinessGlossaryServiceRestTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 09ba265260b3..9f620c738b28 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -73,9 +73,9 @@ class CatalogServiceAsyncClient: """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. """ @@ -95,8 +95,12 @@ class CatalogServiceAsyncClient: parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) + entry_link_path = staticmethod(CatalogServiceClient.entry_link_path) + parse_entry_link_path = staticmethod(CatalogServiceClient.parse_entry_link_path) entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + glossary_path = staticmethod(CatalogServiceClient.glossary_path) + parse_glossary_path = staticmethod(CatalogServiceClient.parse_glossary_path) metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) common_billing_account_path = staticmethod( @@ -629,7 +633,7 @@ async def sample_delete_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. name (:class:`str`): Required. 
The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1283,7 +1287,7 @@ async def sample_delete_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): - The request object. Delele AspectType Request. + The request object. Delete AspectType Request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1661,7 +1665,7 @@ async def sample_create_entry_group(): parent (:class:`str`): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3021,7 +3025,7 @@ async def sample_search_entries(): name (:class:`str`): Required. The project to which the request should be attributed in the following form: - ``projects/{project}/locations/{location}``. + ``projects/{project}/locations/global``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3029,7 +3033,7 @@ async def sample_search_entries(): query (:class:`str`): Required. The query against which entries in scope should be matched. The query syntax is defined in - `Search syntax for Dataplex + `Search syntax for Dataplex Universal Catalog <https://cloud.google.com/dataplex/docs/search-syntax>`__. This corresponds to the ``query`` field @@ -3124,8 +3128,8 @@ async def create_metadata_job( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. .. code-block:: python @@ -3605,6 +3609,367 @@ async def sample_cancel_metadata_job(): metadata=metadata, ) + async def create_entry_link( + self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = await client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]]): + The request object. Request message for CreateEntryLink. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (:class:`google.cloud.dataplex_v1.types.EntryLink`): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (:class:`str`): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry_link( + self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]]): + The request object. Request message for DeleteEntryLink. + name (:class:`str`): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_link( + self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]]): + The request object. Request message for GetEntryLink. + name (:class:`str`): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 8cf6cb9421c6..18128a64f5b6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -119,9 +119,9 @@ def get_transport_class( class CatalogServiceClient(metaclass=CatalogServiceClientMeta): """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. 
""" @@ -284,6 +284,30 @@ def parse_entry_group_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def entry_link_path( + project: str, + location: str, + entry_group: str, + entry_link: str, + ) -> str: + """Returns a fully-qualified entry_link string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format( + project=project, + location=location, + entry_group=entry_group, + entry_link=entry_link, + ) + + @staticmethod + def parse_entry_link_path(path: str) -> Dict[str, str]: + """Parses a entry_link path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entryLinks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def entry_type_path( project: str, @@ -306,6 +330,28 @@ def parse_entry_type_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def glossary_path( + project: str, + location: str, + glossary: str, + ) -> str: + """Returns a fully-qualified glossary string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + + @staticmethod + def parse_glossary_path(path: str) -> Dict[str, str]: + """Parses a glossary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def metadata_job_path( project: str, @@ -1134,7 +1180,7 @@ def sample_delete_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1773,7 +1819,7 @@ def sample_delete_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): - The request object. Delele AspectType Request. + The request object. Delete AspectType Request. name (str): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -2142,7 +2188,7 @@ def sample_create_entry_group(): parent (str): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3470,7 +3516,7 @@ def sample_search_entries(): name (str): Required. The project to which the request should be attributed in the following form: - ``projects/{project}/locations/{location}``. + ``projects/{project}/locations/global``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3478,7 +3524,7 @@ def sample_search_entries(): query (str): Required. The query against which entries in scope should be matched. The query syntax is defined in - `Search syntax for Dataplex + `Search syntax for Dataplex Universal Catalog `__. This corresponds to the ``query`` field @@ -3570,8 +3616,8 @@ def create_metadata_job( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a metadata job. 
For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. .. code-block:: python @@ -4039,6 +4085,358 @@ def sample_cancel_metadata_job(): metadata=metadata, ) + def create_entry_link( + self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]): + The request object. Request message for CreateEntryLink. + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry_link( + self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]): + The request object. Request message for DeleteEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_link( + self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]): + The request object. Request message for GetEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CatalogServiceClient": return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py index be553e45fcb1..8c2fb9760b70 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -377,6 +377,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_entry_link: gapic_v1.method.wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: gapic_v1.method.wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: gapic_v1.method.wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -661,6 +676,33 @@ def cancel_metadata_job( ]: raise NotImplementedError() + @property + def create_entry_link( + self, + ) -> Callable[ + [catalog.CreateEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + + @property + def delete_entry_link( + self, + ) -> Callable[ + [catalog.DeleteEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + + @property + def get_entry_link( + self, + ) -> Callable[ + [catalog.GetEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py index e180c13a42ec..7396cba863b2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -116,9 +116,9 @@ class CatalogServiceGrpcTransport(CatalogServiceTransport): """gRPC backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. @@ -917,8 +917,8 @@ def create_metadata_job( r"""Return a callable for the create metadata job method over gRPC. Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. 
Returns: Callable[[~.CreateMetadataJobRequest], @@ -1022,6 +1022,84 @@ def cancel_metadata_job( ) return self._stubs["cancel_metadata_job"] + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entry_link" not in self._stubs: + self._stubs["create_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateEntryLink", + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["create_entry_link"] + + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entry_link" not in self._stubs: + self._stubs["delete_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink", + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["delete_entry_link"] + + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_entry_link" not in self._stubs: + self._stubs["get_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetEntryLink", + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["get_entry_link"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py index 24d73cf3f057..9eac535a065e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py @@ -122,9 +122,9 @@ class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport): """gRPC AsyncIO backend transport for CatalogService. 
The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. @@ -963,8 +963,8 @@ def create_metadata_job( r"""Return a callable for the create metadata job method over gRPC. Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. Returns: Callable[[~.CreateMetadataJobRequest], @@ -1070,6 +1070,84 @@ def cancel_metadata_job( ) return self._stubs["cancel_metadata_job"] + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entry_link" not in self._stubs: + self._stubs["create_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateEntryLink", + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["create_entry_link"] + + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entry_link" not in self._stubs: + self._stubs["delete_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink", + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["delete_entry_link"] + + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_entry_link" not in self._stubs: + self._stubs["get_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetEntryLink", + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["get_entry_link"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1313,6 +1391,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_entry_link: self._wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: self._wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: self._wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py index 5567f7a0b572..0d2b5af96919 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py @@ -105,6 +105,14 @@ def post_create_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_create_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -145,6 +153,14 @@ def post_delete_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -177,6 +193,14 @@ def post_get_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_get_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -439,6 +463,50 @@ def post_create_entry_group_with_metadata( """ return response, metadata + def pre_create_entry_link( + self, + request: catalog.CreateEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.CreateEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. 
+ """ + return request, metadata + + def post_create_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for create_entry_link + + DEPRECATED. Please use the `post_create_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry_link` interceptor runs + before the `post_create_entry_link_with_metadata` interceptor. + """ + return response + + def post_create_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_entry_link_with_metadata` + interceptor in new development instead of the `post_create_entry_link` interceptor. + When both interceptors are used, this `post_create_entry_link_with_metadata` interceptor runs after the + `post_create_entry_link` interceptor. The (possibly modified) response returned by + `post_create_entry_link` will be passed to + `post_create_entry_link_with_metadata`. + """ + return response, metadata + def pre_create_entry_type( self, request: catalog.CreateEntryTypeRequest, @@ -671,6 +739,50 @@ def post_delete_entry_group_with_metadata( """ return response, metadata + def pre_delete_entry_link( + self, + request: catalog.DeleteEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.DeleteEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for delete_entry_link + + DEPRECATED. Please use the `post_delete_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry_link` interceptor runs + before the `post_delete_entry_link_with_metadata` interceptor. + """ + return response + + def post_delete_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_link_with_metadata` + interceptor in new development instead of the `post_delete_entry_link` interceptor. + When both interceptors are used, this `post_delete_entry_link_with_metadata` interceptor runs after the + `post_delete_entry_link` interceptor. The (possibly modified) response returned by + `post_delete_entry_link` will be passed to + `post_delete_entry_link_with_metadata`. 
+ """ + return response, metadata + def pre_delete_entry_type( self, request: catalog.DeleteEntryTypeRequest, @@ -847,6 +959,50 @@ def post_get_entry_group_with_metadata( """ return response, metadata + def pre_get_entry_link( + self, + request: catalog.GetEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.GetEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for get_entry_link + + DEPRECATED. Please use the `post_get_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry_link` interceptor runs + before the `post_get_entry_link_with_metadata` interceptor. + """ + return response + + def post_get_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_link_with_metadata` + interceptor in new development instead of the `post_get_entry_link` interceptor. + When both interceptors are used, this `post_get_entry_link_with_metadata` interceptor runs after the + `post_get_entry_link` interceptor. The (possibly modified) response returned by + `post_get_entry_link` will be passed to + `post_get_entry_link_with_metadata`. + """ + return response, metadata + def pre_get_entry_type( self, request: catalog.GetEntryTypeRequest, @@ -1605,9 +1761,9 @@ class CatalogServiceRestTransport(_BaseCatalogServiceRestTransport): """REST backend synchronous transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. 
@@ -1735,7 +1891,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } @@ -2329,6 +2485,160 @@ def __call__( ) return resp + class _CreateEntryLink( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: catalog.CreateEntryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the create entry link method over HTTP. + + Args: + request (~.catalog.CreateEntryLinkRequest): + The request object. Request message for CreateEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
+ + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_entry_link( + request, metadata + ) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_transcoded_request( + http_options, request + ) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryLink", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CreateEntryType( _BaseCatalogServiceRestTransport._BaseCreateEntryType, CatalogServiceRestStub ): @@ -2675,7 +2985,7 @@ def __call__( Args: request (~.catalog.DeleteAspectTypeRequest): - The request object. Delele AspectType Request. + The request object. Delete AspectType Request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3076,6 +3386,154 @@ def __call__( ) return resp + class _DeleteEntryLink( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: catalog.DeleteEntryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the delete entry link method over HTTP. + + Args: + request (~.catalog.DeleteEntryLinkRequest): + The request object. Request message for DeleteEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. + + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_entry_link( + request, metadata + ) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryLink", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _DeleteEntryType( _BaseCatalogServiceRestTransport._BaseDeleteEntryType, CatalogServiceRestStub ): @@ -3116,7 +3574,7 @@ def __call__( Args: request (~.catalog.DeleteEntryTypeRequest): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3668,6 +4126,152 @@ def __call__( ) return resp + class _GetEntryLink( + _BaseCatalogServiceRestTransport._BaseGetEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: catalog.GetEntryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the get entry link method over HTTP. + + Args: + request (~.catalog.GetEntryLinkRequest): + The request object. Request message for GetEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
+ + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_entry_link(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryLink", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetEntryType( _BaseCatalogServiceRestTransport._BaseGetEntryType, CatalogServiceRestStub ): @@ -5624,6 +6228,14 @@ def create_entry_group( # In C++ this would require a dynamic_cast return self._CreateEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def create_entry_type( self, @@ -5662,6 +6274,14 @@ def delete_entry_group( # In C++ this would require a dynamic_cast return self._DeleteEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def delete_entry_type( self, @@ -5692,6 +6312,14 @@ def get_entry_group( # In C++ this would require a dynamic_cast return self._GetEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def get_entry_type( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py index 08197e264a59..52abebb66f4a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py @@ -326,6 +326,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryLinkId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks", + "body": "entry_link", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -583,6 
+642,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -771,6 +877,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1589,7 +1742,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py index fb9385f91bd1..76c7d432686f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py +++ 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py @@ -72,7 +72,9 @@ class CmekServiceAsyncClient: - """Dataplex Cmek Service""" + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ _client: CmekServiceClient diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py index e326c3747c13..a7ba321eb8db 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py @@ -116,7 +116,9 @@ def get_transport_class( class CmekServiceClient(metaclass=CmekServiceClientMeta): - """Dataplex Cmek Service""" + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py index aef104f8dd51..8c33cc7ba381 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py @@ -114,7 +114,8 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class CmekServiceGrpcTransport(CmekServiceTransport): """gRPC backend transport for CmekService. - Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py index 8f5ab10967a7..8bcd3af4b6a8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py @@ -120,7 +120,8 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class CmekServiceGrpcAsyncIOTransport(CmekServiceTransport): """gRPC AsyncIO backend transport for CmekService. - Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py index 6ad765489106..dd50fbebd7af 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py @@ -521,7 +521,8 @@ class CmekServiceRestStub: class CmekServiceRestTransport(_BaseCmekServiceRestTransport): """REST backend synchronous transport for CmekService. 
- Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -647,7 +648,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py index c4f258b5c33f..2e54082df7f3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py @@ -505,7 +505,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 512d8a8762c8..a7f0ccd1286b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -71,7 +71,9 @@ class ContentServiceAsyncClient: - """ContentService manages Notebook and SQL Scripts for Dataplex.""" + """ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + """ _client: ContentServiceClient diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 0ca7c45802f7..9b8f553b9c29 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -117,7 +117,9 @@ def get_transport_class( class ContentServiceClient(metaclass=ContentServiceClientMeta): - """ContentService manages Notebook and SQL Scripts for Dataplex.""" + """ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py index 155db682d69e..af860495abee 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -117,7 +117,8 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class ContentServiceGrpcTransport(ContentServiceTransport): """gRPC backend transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py index a5efa74a385b..d1c11078c720 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -123,7 +123,8 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport): """gRPC AsyncIO backend transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py index 5e0ad44c6dda..fda9c44cde55 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py @@ -640,7 +640,8 @@ class ContentServiceRestStub: class ContentServiceRestTransport(_BaseContentServiceRestTransport): """REST backend synchronous transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py index 8101f1f2e67b..7543a0fb9ae8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py @@ -705,7 +705,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index b1122b6a83a3..3bea356be2aa 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -377,7 +377,7 @@ async def sample_create_data_scan(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -702,8 +702,8 @@ async def sample_delete_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -833,8 +833,8 @@ async def sample_get_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -966,7 +966,7 @@ async def sample_list_data_scans(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1092,8 +1092,8 @@ async def sample_run_data_scan(): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. @@ -1205,8 +1205,8 @@ async def sample_get_data_scan_job(): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1319,8 +1319,8 @@ async def sample_list_data_scan_jobs(): Required. The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ec713fafb500..ab1a281ce8e8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -881,7 +881,7 @@ def sample_create_data_scan(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. 
+ and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1200,8 +1200,8 @@ def sample_delete_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1328,8 +1328,8 @@ def sample_get_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1458,7 +1458,7 @@ def sample_list_data_scans(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1581,8 +1581,8 @@ def sample_run_data_scan(): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. @@ -1691,8 +1691,8 @@ def sample_get_data_scan_job(): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1802,8 +1802,8 @@ def sample_list_data_scan_jobs(): Required. The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py index 0d1fa658b5af..61f6cf277ce8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py @@ -872,7 +872,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py index 5e859cc8d6c2..96597b805754 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py @@ -718,7 +718,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index 032cca13e66a..641733050837 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -802,7 +802,7 @@ async def sample_list_data_taxonomies(): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 2691daa384cb..6082f72ab552 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -1250,7 +1250,7 @@ def sample_list_data_taxonomies(): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py index 256f5e7e1828..015996d4fc74 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py @@ -1229,7 +1229,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py index 5c930ba39065..423e13010b14 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py @@ -1028,7 +1028,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index fdc921e1142c..cd442b454512 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -369,7 +369,7 @@ async def sample_create_lake(): parent (:class:`str`): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -802,7 +802,7 @@ async def sample_list_lakes(): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 6f8f6e5c4eb5..39bfaead92e7 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -956,7 +956,7 @@ def sample_create_lake(): parent (str): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. 
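A short sketch of how the documented parent format is passed in practice; the project, region, and lake ID are placeholders:

    from google.cloud import dataplex_v1

    client = dataplex_v1.DataplexServiceClient()

    # The parent names a Google Cloud region, per the updated docstrings.
    operation = client.create_lake(
        parent="projects/my-project/locations/us-central1",
        lake=dataplex_v1.Lake(display_name="My Lake"),
        lake_id="my-lake",
    )
    lake = operation.result()  # create_lake returns a long-running operation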
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1380,7 +1380,7 @@ def sample_list_lakes(): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py index 0dc13af97524..0e083f5373c2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py @@ -2113,7 +2113,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py index 8f41186c5000..6839dc53bd63 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py @@ -1940,7 +1940,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py index 8815f1509c1c..5850fd61fa1b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py @@ -706,7 +706,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py index 095590f45d4e..b33e533b219c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py @@ -14,6 +14,29 @@ # limitations under the License. 
# from .analyze import Content, Environment, Session +from .business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from .catalog import ( Aspect, AspectSource, @@ -21,20 +44,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -129,6 +156,7 @@ RunDataScanResponse, UpdateDataScanRequest, ) +from .datascans_common import DataScanCatalogPublishingStatus from .logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -212,25 +240,50 @@ "Content", "Environment", "Session", + "CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "Aspect", "AspectSource", "AspectType", "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "Entry", "EntryGroup", + "EntryLink", "EntrySource", "EntryType", "GetAspectTypeRequest", "GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetMetadataJobRequest", @@ -317,6 +370,7 @@ "RunDataScanResponse", "UpdateDataScanRequest", "DataScanType", + "DataScanCatalogPublishingStatus", "BusinessGlossaryEvent", "DataQualityScanRuleResult", "DataScanEvent", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py index 8401a158be6d..540dffe6b381 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py @@ -130,7 +130,8 @@ class OsImageRuntime(proto.Message): Attributes: image_version (str): - Required. Dataplex Image version. + Required. Dataplex Universal Catalog Image + version. java_libraries (MutableSequence[str]): Optional. List of Java jars to be included in the runtime environment. 
Valid input includes diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py similarity index 91% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py index 81794bb5d454..199eea0392af 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/business_glossary.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py @@ -17,36 +17,34 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.dataplex.v1', + package="google.cloud.dataplex.v1", manifest={ - 'Glossary', - 'GlossaryCategory', - 'GlossaryTerm', - 'CreateGlossaryRequest', - 'UpdateGlossaryRequest', - 'DeleteGlossaryRequest', - 'GetGlossaryRequest', - 'ListGlossariesRequest', - 'ListGlossariesResponse', - 'CreateGlossaryCategoryRequest', - 'UpdateGlossaryCategoryRequest', - 'DeleteGlossaryCategoryRequest', - 'GetGlossaryCategoryRequest', - 'ListGlossaryCategoriesRequest', - 'ListGlossaryCategoriesResponse', - 'CreateGlossaryTermRequest', - 'UpdateGlossaryTermRequest', - 'DeleteGlossaryTermRequest', - 'GetGlossaryTermRequest', - 'ListGlossaryTermsRequest', - 'ListGlossaryTermsResponse', + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "CreateGlossaryRequest", + "UpdateGlossaryRequest", + "DeleteGlossaryRequest", + "GetGlossaryRequest", + "ListGlossariesRequest", + "ListGlossariesResponse", + "CreateGlossaryCategoryRequest", + "UpdateGlossaryCategoryRequest", + "DeleteGlossaryCategoryRequest", + "GetGlossaryCategoryRequest", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "CreateGlossaryTermRequest", + "UpdateGlossaryTermRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryTermRequest", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", }, ) @@ -324,10 +322,10 @@ class CreateGlossaryRequest(proto.Message): proto.STRING, number=2, ) - glossary: 'Glossary' = proto.Field( + glossary: "Glossary" = proto.Field( proto.MESSAGE, number=3, - message='Glossary', + message="Glossary", ) validate_only: bool = proto.Field( proto.BOOL, @@ -350,10 +348,10 @@ class UpdateGlossaryRequest(proto.Message): actually updating the Glossary. Default: false. 
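A hedged sketch of how the create/update request pair composes with a field mask. IDs are placeholders, the client name follows the new BusinessGlossaryService, and the calls are written as long-running operations (drop .result() if the service returns the resource directly):

    from google.cloud import dataplex_v1
    from google.protobuf import field_mask_pb2

    client = dataplex_v1.BusinessGlossaryServiceClient()

    operation = client.create_glossary(
        parent="projects/my-project/locations/us-central1",
        glossary=dataplex_v1.Glossary(display_name="Sales terms"),
        glossary_id="sales-terms",
    )
    glossary = operation.result()

    # Update only display_name; update_mask limits the write, as documented above.
    glossary.display_name = "Sales glossary"
    updated = client.update_glossary(
        glossary=glossary,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    ).result()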
""" - glossary: 'Glossary' = proto.Field( + glossary: "Glossary" = proto.Field( proto.MESSAGE, number=1, - message='Glossary', + message="Glossary", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -482,10 +480,10 @@ class ListGlossariesResponse(proto.Message): def raw_page(self): return self - glossaries: MutableSequence['Glossary'] = proto.RepeatedField( + glossaries: MutableSequence["Glossary"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Glossary', + message="Glossary", ) next_page_token: str = proto.Field( proto.STRING, @@ -520,10 +518,10 @@ class CreateGlossaryCategoryRequest(proto.Message): proto.STRING, number=2, ) - category: 'GlossaryCategory' = proto.Field( + category: "GlossaryCategory" = proto.Field( proto.MESSAGE, number=3, - message='GlossaryCategory', + message="GlossaryCategory", ) @@ -540,10 +538,10 @@ class UpdateGlossaryCategoryRequest(proto.Message): Required. The list of fields to update. """ - category: 'GlossaryCategory' = proto.Field( + category: "GlossaryCategory" = proto.Field( proto.MESSAGE, number=1, - message='GlossaryCategory', + message="GlossaryCategory", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -614,12 +612,9 @@ class ListGlossaryCategoriesRequest(proto.Message): - immediate_parent Examples of using a filter are: - ------------------------------- - - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` - ------------------------------------------------------------------------------------------------------- - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` This will only return the GlossaryCategories that are directly nested under the specified parent. @@ -671,10 +666,10 @@ class ListGlossaryCategoriesResponse(proto.Message): def raw_page(self): return self - categories: MutableSequence['GlossaryCategory'] = proto.RepeatedField( + categories: MutableSequence["GlossaryCategory"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='GlossaryCategory', + message="GlossaryCategory", ) next_page_token: str = proto.Field( proto.STRING, @@ -709,10 +704,10 @@ class CreateGlossaryTermRequest(proto.Message): proto.STRING, number=2, ) - term: 'GlossaryTerm' = proto.Field( + term: "GlossaryTerm" = proto.Field( proto.MESSAGE, number=3, - message='GlossaryTerm', + message="GlossaryTerm", ) @@ -729,10 +724,10 @@ class UpdateGlossaryTermRequest(proto.Message): Required. The list of fields to update. 
""" - term: 'GlossaryTerm' = proto.Field( + term: "GlossaryTerm" = proto.Field( proto.MESSAGE, number=1, - message='GlossaryTerm', + message="GlossaryTerm", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -801,12 +796,9 @@ class ListGlossaryTermsRequest(proto.Message): - immediate_parent Examples of using a filter are: - ------------------------------- - - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` - ------------------------------------------------------------------------------------------------------- - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` This will only return the GlossaryTerms that are directly nested under the specified parent. @@ -858,10 +850,10 @@ class ListGlossaryTermsResponse(proto.Message): def raw_page(self): return self - terms: MutableSequence['GlossaryTerm'] = proto.RepeatedField( + terms: MutableSequence["GlossaryTerm"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='GlossaryTerm', + message="GlossaryTerm", ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index e028f5e64efe..ee7e879c9bfa 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -69,6 +69,10 @@ "ListMetadataJobsResponse", "CancelMetadataJobRequest", "MetadataJob", + "EntryLink", + "CreateEntryLinkRequest", + "DeleteEntryLinkRequest", + "GetEntryLinkRequest", }, ) @@ -169,14 +173,15 @@ class AspectType(proto.Message): """ class Authorization(proto.Message): - r"""Autorization for an AspectType. + r"""Authorization for an AspectType. Attributes: alternate_use_permission (str): Immutable. The IAM permission grantable on the EntryGroup to allow access to instantiate - Aspects of Dataplex owned AspectTypes, only - settable for Dataplex owned Types. + Aspects of Dataplex Universal Catalog owned + AspectTypes, only settable for Dataplex + Universal Catalog owned Types. """ alternate_use_permission: str = proto.Field( @@ -208,8 +213,8 @@ class MetadataTemplate(proto.Message): Primitive types: - string - - integer - - boolean + - int + - bool - double - datetime. Must be of the format RFC3339 UTC "Zulu" (Examples: "2014-10-02T15:01:23Z" and @@ -615,8 +620,9 @@ class Authorization(proto.Message): alternate_use_permission (str): Immutable. The IAM permission grantable on the Entry Group to allow access to instantiate - Entries of Dataplex owned Entry Types, only - settable for Dataplex owned Types. + Entries of Dataplex Universal Catalog owned + Entry Types, only settable for Dataplex + Universal Catalog owned Types. """ alternate_use_permission: str = proto.Field( @@ -787,10 +793,10 @@ class Entry(proto.Message): ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the entry was - created in Dataplex. + created in Dataplex Universal Catalog. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
The time when the entry was last - updated in Dataplex. + updated in Dataplex Universal Catalog. aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): Optional. The aspects that are attached to the entry. Depending on how the aspect is attached to the entry, the @@ -974,7 +980,7 @@ class CreateEntryGroupRequest(proto.Message): parent (str): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. entry_group_id (str): Required. EntryGroup identifier. entry_group (google.cloud.dataplex_v1.types.EntryGroup): @@ -1223,7 +1229,7 @@ class UpdateEntryTypeRequest(proto.Message): class DeleteEntryTypeRequest(proto.Message): - r"""Delele EntryType Request. + r"""Delete EntryType Request. Attributes: name (str): @@ -1419,7 +1425,7 @@ class UpdateAspectTypeRequest(proto.Message): class DeleteAspectTypeRequest(proto.Message): - r"""Delele AspectType Request. + r"""Delete AspectType Request. Attributes: name (str): @@ -1859,11 +1865,11 @@ class SearchEntriesRequest(proto.Message): name (str): Required. The project to which the request should be attributed in the following form: - ``projects/{project}/locations/{location}``. + ``projects/{project}/locations/global``. query (str): Required. The query against which entries in scope should be matched. The query syntax is defined in `Search syntax for - Dataplex + Dataplex Universal Catalog `__. page_size (int): Optional. Number of results in the search page. If <=0, then @@ -1877,7 +1883,7 @@ class SearchEntriesRequest(proto.Message): Optional. Specifies the ordering of results. Supported values are: - - ``relevance`` (default) + - ``relevance`` - ``last_modified_timestamp`` - ``last_modified_timestamp asc`` scope (str): @@ -2023,17 +2029,22 @@ class ImportItem(proto.Message): entry (google.cloud.dataplex_v1.types.Entry): Information about an entry and its attached aspects. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Information about the entry link. User should provide either + one of the entry or entry_link. While providing entry_link, + user should not provide update_mask and aspect_keys. update_mask (google.protobuf.field_mask_pb2.FieldMask): The fields to update, in paths that are relative to the ``Entry`` resource. Separate each field with a comma. - In ``FULL`` entry sync mode, Dataplex includes the paths of - all of the fields for an entry that can be modified, - including aspects. This means that Dataplex replaces the - existing entry with the entry in the metadata import file. - All modifiable fields are updated, regardless of the fields - that are listed in the update mask, and regardless of - whether a field is present in the ``entry`` object. + In ``FULL`` entry sync mode, Dataplex Universal Catalog + includes the paths of all of the fields for an entry that + can be modified, including aspects. This means that Dataplex + Universal Catalog replaces the existing entry with the entry + in the metadata import file. All modifiable fields are + updated, regardless of the fields that are listed in the + update mask, and regardless of whether a field is present in + the ``entry`` object. The ``update_mask`` field is ignored when an entry is created or re-created. @@ -2041,10 +2052,11 @@ class ImportItem(proto.Message): In an aspect-only metadata job (when entry sync mode is ``NONE``), set this value to ``aspects``. 
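To make the entry/entry_link exclusivity concrete, a sketch of the two shapes an ImportItem can take; all resource names and the aspect key are placeholders:

    from google.cloud import dataplex_v1

    # Entry-shaped item: update_mask and aspect_keys apply.
    entry_item = dataplex_v1.ImportItem(
        entry=dataplex_v1.Entry(
            name="projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry",
            entry_type="projects/my-project/locations/us-central1/entryTypes/my-type",
        ),
        update_mask={"paths": ["aspects"]},
        aspect_keys=["my-project.us-central1.my-aspect-type"],
    )

    # Entry-link-shaped item: per the docs above, leave update_mask and
    # aspect_keys unset when entry_link is provided.
    link_item = dataplex_v1.ImportItem(
        entry_link=dataplex_v1.EntryLink(
            name="projects/my-project/locations/us-central1/entryGroups/my-group/entryLinks/my-link",
            entry_link_type="projects/dataplex-types/locations/global/entryLinkTypes/synonym",
        ),
    )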
- Dataplex also determines which entries and aspects to modify - by comparing the values and timestamps that you provide in - the metadata import file with the values and timestamps that - exist in your project. For more information, see `Comparison + Dataplex Universal Catalog also determines which entries and + aspects to modify by comparing the values and timestamps + that you provide in the metadata import file with the values + and timestamps that exist in your project. For more + information, see `Comparison logic `__. aspect_keys (MutableSequence[str]): The aspects to modify. Supports the following syntaxes: @@ -2065,8 +2077,9 @@ class ImportItem(proto.Message): In ``FULL`` entry sync mode, if you leave this field empty, it is treated as specifying exactly those aspects that are - present within the specified entry. Dataplex implicitly adds - the keys for all of the required aspects of an entry. + present within the specified entry. Dataplex Universal + Catalog implicitly adds the keys for all of the required + aspects of an entry. """ entry: "Entry" = proto.Field( @@ -2074,6 +2087,11 @@ class ImportItem(proto.Message): number=1, message="Entry", ) + entry_link: "EntryLink" = proto.Field( + proto.MESSAGE, + number=4, + message="EntryLink", + ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, @@ -2332,6 +2350,15 @@ class ImportJobResult(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the status was updated. + deleted_entry_links (int): + Output only. The total number of entry links + that were successfully deleted. + created_entry_links (int): + Output only. The total number of entry links + that were successfully created. + unchanged_entry_links (int): + Output only. The total number of entry links + that were left unchanged. """ deleted_entries: int = proto.Field( @@ -2359,6 +2386,18 @@ class ImportJobResult(proto.Message): number=5, message=timestamp_pb2.Timestamp, ) + deleted_entry_links: int = proto.Field( + proto.INT64, + number=7, + ) + created_entry_links: int = proto.Field( + proto.INT64, + number=8, + ) + unchanged_entry_links: int = proto.Field( + proto.INT64, + number=9, + ) class ExportJobResult(proto.Message): r"""Summary results from a metadata export job. The results are a @@ -2451,11 +2490,12 @@ class SyncMode(proto.Enum): Sync mode unspecified. FULL (1): All resources in the job's scope are - modified. If a resource exists in Dataplex but - isn't included in the metadata import file, the - resource is deleted when you run the metadata - job. Use this mode to perform a full sync of the - set of entries in the job scope. + modified. If a resource exists in Dataplex + Universal Catalog but isn't included in the + metadata import file, the resource is deleted + when you run the metadata job. Use this mode to + perform a full sync of the set of entries in the + job scope. This sync mode is supported for entries. INCREMENTAL (2): @@ -2550,6 +2590,41 @@ class ImportJobScope(proto.Message): The location of an aspect type must either match the location of the job, or the aspect type must be global. + glossaries (MutableSequence[str]): + Optional. The glossaries that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/glossaries/{glossary_id}``. + + While importing Business Glossary entries, the user must + provide glossaries. While importing entries, the user does + not have to provide glossaries. 
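A sketch of an import job scope that uses the new fields together; every resource name is a placeholder, and the nesting (MetadataJob.ImportJobSpec.ImportJobScope) follows the message layout in this file:

    from google.cloud import dataplex_v1

    scope = dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
        entry_groups=["projects/my-project/locations/us-central1/entryGroups/my-group"],
        # New in this change: glossaries must be listed when importing
        # Business Glossary entries; entry links are constrained both by
        # type and by the projects their referenced entries may live in.
        glossaries=["projects/my-project/locations/us-central1/glossaries/my-glossary"],
        entry_link_types=["projects/dataplex-types/locations/global/entryLinkTypes/synonym"],
        referenced_entry_scopes=["projects/my-project"],
    )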
If the metadata import file + attempts to modify Business Glossary entries whose glossary + isn't included in this list, the import job will skip those + entries. + + The location of a glossary must either match the location of + the job, or the glossary must be global. + entry_link_types (MutableSequence[str]): + Optional. The entry link types that are in scope for the + import job, specified as relative resource names in the + format + ``projects/{project_number_or_id}/locations/{location_id}/entryLinkTypes/{entry_link_type_id}``. + The job modifies only the entryLinks that belong to these + entry link types. + + If the metadata import file attempts to create or delete an + entry link whose entry link type isn't included in this + list, the import job will skip those entry links. + referenced_entry_scopes (MutableSequence[str]): + Optional. Defines the scope of entries that can be + referenced in the entry links. + + Currently, projects are supported as valid scopes. Format: + ``projects/{project_number_or_id}`` + + If the metadata import file attempts to create an entry link + which references an entry that is not in the scope, the + import job will skip that entry link. """ entry_groups: MutableSequence[str] = proto.RepeatedField( @@ -2564,6 +2639,18 @@ class ImportJobScope(proto.Message): proto.STRING, number=3, ) + glossaries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + entry_link_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + referenced_entry_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) source_storage_uri: str = proto.Field( proto.STRING, @@ -2607,9 +2694,9 @@ class ExportJobSpec(proto.Message): You can optionally specify a custom prefix after the bucket name, in the format ``gs://{bucket}/{prefix}/``. The maximum length of the custom prefix is 128 characters. Dataplex - constructs the object path for the exported files by using - the bucket name and prefix that you provide, followed by a - system-generated path. + Universal Catalog constructs the object path for the + exported files by using the bucket name and prefix that you + provide, followed by a system-generated path. The bucket must be in the same VPC Service Controls perimeter as the job. @@ -2825,4 +2912,174 @@ class State(proto.Enum): ) +class EntryLink(proto.Message): + r"""EntryLink represents a link between two Entries. + + Attributes: + name (str): + Output only. Immutable. Identifier. The relative resource + name of the Entry Link, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}`` + entry_link_type (str): + Required. Immutable. Relative resource name of the Entry + Link Type used to create this Entry Link. For example: + + - Entry link between synonym terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/synonym`` + - Entry link between related terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/related`` + - Entry link between glossary terms and data assets: + ``projects/dataplex-types/locations/global/entryLinkTypes/definition`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + last updated. + entry_references (MutableSequence[google.cloud.dataplex_v1.types.EntryLink.EntryReference]): + Required. 
Specifies the Entries referenced in + the Entry Link. There should be exactly two + entry references. + """ + + class EntryReference(proto.Message): + r"""Reference to the Entry that is linked through the Entry Link. + + Attributes: + name (str): + Required. Immutable. The relative resource name of the + referenced Entry, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` + path (str): + Immutable. The path in the Entry that is + referenced in the Entry Link. Empty path denotes + that the Entry itself is referenced in the Entry + Link. + type_ (google.cloud.dataplex_v1.types.EntryLink.EntryReference.Type): + Required. Immutable. The reference type of + the Entry. + """ + + class Type(proto.Enum): + r"""Reference type of the Entry. + + Values: + UNSPECIFIED (0): + Unspecified reference type. Implies that the + Entry is referenced in a non-directional Entry + Link. + SOURCE (2): + The Entry is referenced as the source of the + directional Entry Link. + TARGET (3): + The Entry is referenced as the target of the + directional Entry Link. + """ + UNSPECIFIED = 0 + SOURCE = 2 + TARGET = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + type_: "EntryLink.EntryReference.Type" = proto.Field( + proto.ENUM, + number=3, + enum="EntryLink.EntryReference.Type", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_type: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + entry_references: MutableSequence[EntryReference] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=EntryReference, + ) + + +class CreateEntryLinkRequest(proto.Message): + r"""Request message for CreateEntryLink. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_link: "EntryLink" = proto.Field( + proto.MESSAGE, + number=3, + message="EntryLink", + ) + + +class DeleteEntryLinkRequest(proto.Message): + r"""Request message for DeleteEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEntryLinkRequest(proto.Message): + r"""Request message for GetEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. 
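Putting the pieces together, a hedged sketch of creating a non-directional (synonym) link between two glossary-term entries. IDs are placeholders and the usual flattened signature is assumed (otherwise pass a CreateEntryLinkRequest):

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    entry_link = dataplex_v1.EntryLink(
        entry_link_type="projects/dataplex-types/locations/global/entryLinkTypes/synonym",
        # Exactly two references; UNSPECIFIED marks a non-directional link.
        entry_references=[
            dataplex_v1.EntryLink.EntryReference(
                name="projects/my-project/locations/us-central1/entryGroups/my-group/entries/term-a",
                type_=dataplex_v1.EntryLink.EntryReference.Type.UNSPECIFIED,
            ),
            dataplex_v1.EntryLink.EntryReference(
                name="projects/my-project/locations/us-central1/entryGroups/my-group/entries/term-b",
                type_=dataplex_v1.EntryLink.EntryReference.Type.UNSPECIFIED,
            ),
        ],
    )

    created = client.create_entry_link(
        parent="projects/my-project/locations/us-central1/entryGroups/my-group",
        entry_link=entry_link,
        entry_link_id="term-a-synonym-term-b",
    )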
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py index eb5ff0a710ba..0e571059f10d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py @@ -81,6 +81,11 @@ class BigQueryPublishingConfig(proto.Message): For supported values, refer to https://cloud.google.com/bigquery/docs/locations#supported_locations. + project (str): + Optional. The project of the BigQuery dataset to publish + BigLake external or non-BigLake external tables to. If not + specified, the project of the Cloud Storage bucket will be + used. The format is "projects/{project_id_or_number}". """ class TableType(proto.Enum): @@ -119,6 +124,10 @@ class TableType(proto.Enum): proto.STRING, number=4, ) + project: str = proto.Field( + proto.STRING, + number=5, + ) class StorageConfig(proto.Message): r"""Configurations related to Cloud Storage as the data source. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index e4d3680a5ee8..9c254e7f8f9c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -45,11 +45,10 @@ class DataProfileSpec(proto.Message): 100. row_filter (str): - Optional. A filter applied to all rows in a single DataScan - job. The filter needs to be a valid SQL expression for a - `WHERE clause in GoogleSQL - syntax `__. - + Optional. A filter applied to all rows in a + single DataScan job. The filter needs to be a + valid SQL expression for a WHERE clause in + BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10 post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): Optional. Actions to take upon job @@ -85,7 +84,6 @@ class BigQueryExport(proto.Message): Optional. The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID """ results_table: str = proto.Field( @@ -151,11 +149,13 @@ class DataProfileResult(proto.Message): Attributes: row_count (int): - The count of rows scanned. + Output only. The count of rows scanned. profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): - The profile information per field. + Output only. The profile information per + field. scanned_data (google.cloud.dataplex_v1.types.ScannedData): - The data scanned for this result. + Output only. The data scanned for this + result. post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): Output only. The result of post scan actions. """ @@ -166,8 +166,8 @@ class Profile(proto.Message): Attributes: fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): - List of fields with structural and profile - information for each field. + Output only. List of fields with structural + and profile information for each field. """ class Field(proto.Message): @@ -175,23 +175,23 @@ class Field(proto.Message): Attributes: name (str): - The name of the field. + Output only. 
The name of the field. type_ (str): - The data type retrieved from the schema of the data source. - For instance, for a BigQuery native table, it is the - `BigQuery Table + Output only. The data type retrieved from the schema of the + data source. For instance, for a BigQuery native table, it + is the `BigQuery Table Schema `__. - For a Dataplex Entity, it is the `Entity + For a Dataplex Universal Catalog Entity, it is the `Entity Schema `__. mode (str): - The mode of the field. Possible values include: + Output only. The mode of the field. Possible values include: - REQUIRED, if it is a required field. - NULLABLE, if it is an optional field. - REPEATED, if it is a repeated field. profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): - Profile information for the corresponding - field. + Output only. Profile information for the + corresponding field. """ class ProfileInfo(proto.Message): @@ -206,22 +206,23 @@ class ProfileInfo(proto.Message): Attributes: null_ratio (float): - Ratio of rows with null value against total - scanned rows. + Output only. Ratio of rows with null value + against total scanned rows. distinct_ratio (float): - Ratio of rows with distinct values against - total scanned rows. Not available for complex + Output only. Ratio of rows with distinct + values against total scanned rows. Not available + for complex non-groupable field type, including + RECORD, ARRAY, GEOGRAPHY, and JSON, as well as + fields with REPEATABLE mode. + top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): + Output only. The list of top N non-null + values, frequency and ratio with which they + occur in the scanned data. N is 10 or equal to + the number of distinct values in the field, + whichever is smaller. Not available for complex non-groupable field type, including RECORD, ARRAY, GEOGRAPHY, and JSON, as well as fields with REPEATABLE mode. - top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): - The list of top N non-null values, frequency - and ratio with which they occur in the scanned - data. N is 10 or equal to the number of distinct - values in the field, whichever is smaller. Not - available for complex non-groupable field type, - including RECORD, ARRAY, GEOGRAPHY, and JSON, as - well as fields with REPEATABLE mode. string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): String type field information. @@ -241,14 +242,14 @@ class StringFieldInfo(proto.Message): Attributes: min_length (int): - Minimum length of non-null values in the - scanned data. + Output only. Minimum length of non-null + values in the scanned data. max_length (int): - Maximum length of non-null values in the - scanned data. + Output only. Maximum length of non-null + values in the scanned data. average_length (float): - Average length of non-null values in the - scanned data. + Output only. Average length of non-null + values in the scanned data. """ min_length: int = proto.Field( @@ -269,34 +270,35 @@ class IntegerFieldInfo(proto.Message): Attributes: average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. standard_deviation (float): - Standard deviation of non-null values in the - scanned data. NaN, if the field has a NaN. + Output only. 
Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. min_ (int): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. quartiles (MutableSequence[int]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of approximate - quartile values for the scanned data, occurring - in order Q1, median, Q3. + Output only. A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. Three main quartiles + used are: The first quartile (Q1) splits off the + lowest 25% of data from the highest 75%. It is + also known as the lower or 25th empirical + quartile, as 25% of the data is below this + point. The second quartile (Q2) is the median of + a data set. So, 50% of the data lies below this + point. The third quartile (Q3) splits off the + highest 25% of data from the lowest 75%. It is + known as the upper or 75th empirical quartile, + as 75% of the data lies below this point. Here, + the quartiles is provided as an ordered list of + approximate quartile values for the scanned + data, occurring in order Q1, median, Q3. max_ (int): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Maximum of non-null values in + the scanned data. NaN, if the field has a NaN. """ average: float = proto.Field( @@ -325,34 +327,35 @@ class DoubleFieldInfo(proto.Message): Attributes: average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. standard_deviation (float): - Standard deviation of non-null values in the - scanned data. NaN, if the field has a NaN. + Output only. Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. min_ (float): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. quartiles (MutableSequence[float]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of quartile values - for the scanned data, occurring in order Q1, - median, Q3. + Output only. 
A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. Three main quartiles + used are: The first quartile (Q1) splits off the + lowest 25% of data from the highest 75%. It is + also known as the lower or 25th empirical + quartile, as 25% of the data is below this + point. The second quartile (Q2) is the median of + a data set. So, 50% of the data lies below this + point. The third quartile (Q3) splits off the + highest 25% of data from the lowest 75%. It is + known as the upper or 75th empirical quartile, + as 75% of the data lies below this point. Here, + the quartiles is provided as an ordered list of + quartile values for the scanned data, occurring + in order Q1, median, Q3. max_ (float): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Maximum of non-null values in + the scanned data. NaN, if the field has a NaN. """ average: float = proto.Field( @@ -381,14 +384,15 @@ class TopNValue(proto.Message): Attributes: value (str): - String value of a top N non-null value. + Output only. String value of a top N non-null + value. count (int): - Count of the corresponding value in the - scanned data. + Output only. Count of the corresponding value + in the scanned data. ratio (float): - Ratio of the corresponding value in the field - against the total number of rows in the scanned - data. + Output only. Ratio of the corresponding value + in the field against the total number of rows in + the scanned data. """ value: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 7bd150fde2ce..23dc6dea5c1f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -19,7 +19,7 @@ import proto # type: ignore -from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import datascans_common, processing __protobuf__ = proto.module( package="google.cloud.dataplex.v1", @@ -63,6 +63,10 @@ class DataQualitySpec(proto.Message): post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): Optional. Actions to take upon job completion. + catalog_publishing_enabled (bool): + Optional. If set, the latest DataScan job + result will be published as Dataplex Universal + Catalog metadata. """ class PostScanActions(proto.Message): @@ -208,6 +212,10 @@ class NotificationReport(proto.Message): number=6, message=PostScanActions, ) + catalog_publishing_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) class DataQualityResult(proto.Message): @@ -248,6 +256,10 @@ class DataQualityResult(proto.Message): result. post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): Output only. The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + Output only. The status of publishing the + data scan as Dataplex Universal Catalog + metadata. 
""" class PostScanActionsResult(proto.Message): @@ -346,6 +358,13 @@ class State(proto.Enum): number=8, message=PostScanActionsResult, ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = ( + proto.Field( + proto.MESSAGE, + number=11, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + ) class DataQualityRuleResult(proto.Message): @@ -482,10 +501,10 @@ class DataQualityDimension(proto.Message): Attributes: name (str): - Optional. The dimension name a rule belongs - to. Custom dimension name is supported with all - uppercase letters and maximum length of 30 - characters. + Output only. The dimension name a rule + belongs to. Custom dimension name is supported + with all uppercase letters and maximum length of + 30 characters. """ name: str = proto.Field( @@ -567,10 +586,11 @@ class DataQualityRule(proto.Message): - SetExpectation - UniquenessExpectation dimension (str): - Required. The dimension a rule belongs to. Results are also - aggregated at the dimension level. Supported dimensions are - **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "FRESHNESS", "VOLUME"]** + Required. The dimension a rule belongs to. + Results are also aggregated at the dimension + level. Custom dimension name is supported with + all uppercase letters and maximum length of 30 + characters. threshold (float): Optional. The minimum ratio of **passing_rows / total_rows** required to pass this rule, with a range of [0.0, 1.0]. @@ -919,6 +939,12 @@ class DataQualityColumnResult(proto.Message): points). This field is a member of `oneof`_ ``_score``. + passed (bool): + Output only. Whether the column passed or + failed. + dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): + Output only. The dimension-level results for + this column. """ column: str = proto.Field( @@ -930,6 +956,15 @@ class DataQualityColumnResult(proto.Message): number=2, optional=True, ) + passed: bool = proto.Field( + proto.BOOL, + number=3, + ) + dimensions: MutableSequence["DataQualityDimensionResult"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DataQualityDimensionResult", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py index cb4e92cf412d..3e8e104b6a3a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py @@ -479,7 +479,7 @@ class ListDataTaxonomiesRequest(proto.Message): parent (str): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of DataTaxonomies to return. The service may return fewer than this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index e9f3df8f1d40..aa370fd2abb8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -79,7 +79,7 @@ class CreateDataScanRequest(proto.Message): Required. 
The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. data_scan (google.cloud.dataplex_v1.types.DataScan): Required. DataScan resource. data_scan_id (str): @@ -153,7 +153,8 @@ class DeleteDataScanRequest(proto.Message): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. force (bool): Optional. If set to true, any child resources of this data scan will also be deleted. @@ -179,7 +180,8 @@ class GetDataScanRequest(proto.Message): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): Optional. Select the DataScan view to return. Defaults to ``BASIC``. @@ -219,7 +221,7 @@ class ListDataScansRequest(proto.Message): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of dataScans to return. The service may return fewer than this @@ -303,7 +305,8 @@ class RunDataScanRequest(proto.Message): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. """ @@ -337,7 +340,8 @@ class GetDataScanJobRequest(proto.Message): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView): Optional. Select the DataScanJob view to return. Defaults to ``BASIC``. @@ -377,7 +381,8 @@ class ListDataScanJobsRequest(proto.Message): Required. The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. page_size (int): Optional. Maximum number of DataScanJobs to return. The service may return fewer than this @@ -483,8 +488,8 @@ class GenerateDataQualityRulesResponse(proto.Message): Attributes: rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): The data quality rules that Dataplex - generates based on the results of a data - profiling scan. + Universal Catalog generates based on the results + of a data profiling scan. 
""" rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( @@ -527,7 +532,8 @@ class DataScan(proto.Message): scan, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. uid (str): Output only. System generated globally unique ID for the scan. This ID will be different if @@ -768,7 +774,8 @@ class DataScanJob(proto.Message): DataScanJob, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. uid (str): Output only. System generated globally unique ID for the DataScanJob. diff --git a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py similarity index 95% rename from owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py rename to packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py index a23567bed152..1a8ef46d0a0c 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/google/cloud/dataplex_v1/types/datascans_common.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.dataplex.v1', + package="google.cloud.dataplex.v1", manifest={ - 'DataScanCatalogPublishingStatus', + "DataScanCatalogPublishingStatus", }, ) @@ -37,6 +36,7 @@ class DataScanCatalogPublishingStatus(proto.Message): Output only. Execution state for catalog publishing. """ + class State(proto.Enum): r"""Execution state for the publishing. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index 7b1336f4f29e..54f0851abf29 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -21,6 +21,8 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.dataplex_v1.types import datascans_common + __protobuf__ = proto.module( package="google.cloud.dataplex.v1", manifest={ @@ -434,8 +436,9 @@ class ExecutionTrigger(proto.Enum): EXECUTION_TRIGGER_UNSPECIFIED (0): The job execution trigger is unspecified. TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. RUN_REQUEST (2): The job was triggered by the explicit call of Task API. @@ -837,6 +840,9 @@ class DataScanEvent(proto.Message): This field is a member of `oneof`_ ``appliedConfigs``. post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult): The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + The status of publishing the data scan as + Dataplex Universal Catalog metadata. """ class ScanType(proto.Enum): @@ -872,7 +878,7 @@ class State(proto.Enum): CANCELLED (4): Data scan job was cancelled. 
CREATED (5): - Data scan job was createed. + Data scan job was created. """ STATE_UNSPECIFIED = 0 STARTED = 1 @@ -1180,6 +1186,13 @@ class State(proto.Enum): number=11, message=PostScanActionsResult, ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = ( + proto.Field( + proto.MESSAGE, + number=13, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + ) class DataQualityScanRuleResult(proto.Message): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py index 4fc49722de73..bfd47aa4b8b8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py @@ -719,20 +719,21 @@ class Schema(proto.Message): Attributes: user_managed (bool): Required. Set to ``true`` if user-managed or ``false`` if - managed by Dataplex. The default is ``false`` (managed by - Dataplex). - - - Set to ``false``\ to enable Dataplex discovery to update - the schema. including new data discovery, schema - inference, and schema evolution. Users retain the ability - to input and edit the schema. Dataplex treats schema input - by the user as though produced by a previous Dataplex + managed by Dataplex Universal Catalog. The default is + ``false`` (managed by Dataplex Universal Catalog). + + - Set to ``false``\ to enable Dataplex Universal Catalog + discovery to update the schema. including new data + discovery, schema inference, and schema evolution. Users + retain the ability to input and edit the schema. Dataplex + Universal Catalog treats schema input by the user as + though produced by a previous Dataplex Universal Catalog discovery operation, and it will evolve the schema and take action based on that treatment. - Set to ``true`` to fully manage the entity schema. This - setting guarantees that Dataplex will not change schema - fields. + setting guarantees that Dataplex Universal Catalog will + not change schema fields. fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): Optional. The sequence of fields describing data in table entities. **Note:** BigQuery SchemaFields are immutable. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py index 6eb8897ac0fb..cbf390d581b3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py @@ -103,15 +103,19 @@ class DataSource(proto.Message): Attributes: entity (str): - Immutable. The Dataplex entity that represents the data - source (e.g. BigQuery table) for DataScan, of the form: + Immutable. The Dataplex Universal Catalog entity that + represents the data source (e.g. BigQuery table) for + DataScan, of the form: ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. This field is a member of `oneof`_ ``source``. resource (str): Immutable. The service-qualified full resource name of the cloud resource for a DataScan job to scan against. 
The field - could be: BigQuery table of type "TABLE" for + could either be: Cloud Storage bucket for DataDiscoveryScan + Format: + //storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID + or BigQuery table of type "TABLE" for DataProfileScan/DataQualityScan Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID @@ -151,13 +155,15 @@ class IncrementalField(proto.Message): Attributes: field (str): - The field that contains values which - monotonically increases over time (e.g. a + Output only. The field that contains values + which monotonically increases over time (e.g. a timestamp column). start (str): - Value that marks the start of the range. + Output only. Value that marks the start of + the range. end (str): - Value that marks the end of the range. + Output only. Value that marks the end of the + range. """ field: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py index 46c4f9c61a7f..009cf0e32bd1 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py @@ -81,7 +81,7 @@ class CreateLakeRequest(proto.Message): parent (str): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. lake_id (str): Required. Lake identifier. This ID will be used to generate names such as database and dataset names when publishing @@ -170,7 +170,7 @@ class ListLakesRequest(proto.Message): parent (str): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of Lakes to return. The service may return fewer than this value. If diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py index a8f313e987ce..7c0d9a249915 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py @@ -665,7 +665,8 @@ class State(proto.Enum): FAILED (5): The job is no longer running due to an error. ABORTED (6): - The job was cancelled outside of Dataplex. + The job was cancelled outside of Dataplex + Universal Catalog. """ STATE_UNSPECIFIED = 0 RUNNING = 1 @@ -682,8 +683,9 @@ class Trigger(proto.Enum): TRIGGER_UNSPECIFIED (0): The trigger is unspecified. TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. RUN_REQUEST (2): The job was triggered by the explicit call of Task API. 
diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py 
rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py similarity index 100% rename from 
owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py 
b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py diff --git 
a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py rename to 
packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py rename to 
packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py diff --git a/owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-dataplex/v1/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py rename to packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index bba5daaf9f93..a12a3f36de1e 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -11,6 +11,2481 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", + "method": { + "fullName": 
"google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + "description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + "description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager", + "shortName": "list_glossary_categories" + 
}, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager", + "shortName": "list_glossary_categories" + }, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_terms", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + 
{ + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
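
UpdateGlossary differs from the category and term updates: its resultType is an Operation (AsyncOperation on the async client), so the updated Glossary is unwrapped from the LRO. A sketch under the same placeholder naming:

from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

client = dataplex_v1.BusinessGlossaryServiceClient()
operation = client.update_glossary(
    glossary=dataplex_v1.Glossary(
        name="projects/my-project/locations/us-central1/glossaries/my-glossary",
        display_name="Business Glossary (renamed)",
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)
glossary = operation.result()  # blocks until the update finishes
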
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py" + }, { "canonical": true, "clientMethod": { @@ -269,23 +2744,200 @@ "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateAspectType" + "shortName": "CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" }, { "name": "parent", "type": "str" }, { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" }, { - "name": "aspect_type_id", + "name": "entry_group_id", "type": "str" }, { @@ -302,21 +2954,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_aspect_type" + "shortName": "create_entry_group" }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", "segments": [ { - "end": 61, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -326,22 +2978,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" }, { "canonical": true, @@ -351,30 +3003,30 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateEntryGroup" + "shortName": "CreateEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" }, { - "name": "entry_group_id", + "name": "entry_link_id", "type": "str" }, { @@ -390,22 +3042,22 @@ "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "description": "Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_async", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -415,22 +3067,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_link_async.py" }, { "canonical": true, @@ -439,30 +3091,30 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateEntryGroup" + "shortName": "CreateEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" }, { - "name": "entry_group_id", + "name": "entry_link_id", "type": "str" }, { @@ -478,22 +3130,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", + "description": "Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_sync", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -503,22 +3155,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": 
"RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py" }, { "canonical": true, @@ -977,23 +3629,184 @@ "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateMetadataJob" + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" }, { - "name": "metadata_job_id", + "name": "name", "type": "str" }, { @@ -1010,21 +3823,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_metadata_job" + "shortName": "delete_aspect_type" }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", "segments": [ { - "end": 63, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1034,22 +3847,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" }, { "canonical": true, @@ -1059,19 +3872,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteAspectType" + "shortName": "DeleteEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" }, { "name": "name", @@ -1091,13 +3904,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_aspect_type" + "shortName": "delete_entry_group" }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", "segments": [ { "end": 55, @@ -1130,7 +3943,7 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" }, { "canonical": true, @@ -1139,19 +3952,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteAspectType" + "shortName": "DeleteEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" }, { "name": "name", @@ -1171,13 +3984,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_aspect_type" + "shortName": "delete_entry_group" }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", "segments": [ { "end": 55, @@ -1210,7 +4023,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" }, { "canonical": true, @@ -1220,19 +4033,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteEntryGroup" + "shortName": "DeleteEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" }, { "name": "name", @@ -1251,22 +4064,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", + "description": "Sample for DeleteEntryLink", + "file": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 
51, "start": 27, "type": "SHORT" }, @@ -1281,17 +4094,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py" }, { "canonical": true, @@ -1300,19 +4113,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteEntryGroup" + "shortName": "DeleteEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" }, { "name": "name", @@ -1331,22 +4144,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", + "description": "Sample for DeleteEntryLink", + "file": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1361,17 +4174,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py" }, { "canonical": true, @@ -2017,6 +4830,167 @@ ], "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": 
"get_entry_link" + }, + "description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "get_entry_link" + }, + "description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index 59d5de09c6c0..e054db378114 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -52,8 +52,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'create_entity': ('parent', 'entity', 'validate_only', ), 'create_entry': ('parent', 'entry_id', 'entry', ), 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), + 'create_entry_link': ('parent', 'entry_link_id', 'entry_link', ), 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), + 'create_glossary': ('parent', 'glossary_id', 'glossary', 'validate_only', ), + 'create_glossary_category': ('parent', 'category_id', 'category', ), + 'create_glossary_term': ('parent', 'term_id', 'term', ), 'create_lake': ('parent', 'lake_id', 'lake', 
'validate_only', ), 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), 'create_partition': ('parent', 'partition', 'validate_only', ), @@ -70,8 +74,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'delete_entity': ('name', 'etag', ), 'delete_entry': ('name', ), 'delete_entry_group': ('name', 'etag', ), + 'delete_entry_link': ('name', ), 'delete_entry_type': ('name', 'etag', ), 'delete_environment': ('name', ), + 'delete_glossary': ('name', 'etag', ), + 'delete_glossary_category': ('name', ), + 'delete_glossary_term': ('name', ), 'delete_lake': ('name', ), 'delete_partition': ('name', 'etag', ), 'delete_task': ('name', ), @@ -89,8 +97,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'get_entity': ('name', 'view', ), 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), 'get_entry_group': ('name', ), + 'get_entry_link': ('name', ), 'get_entry_type': ('name', ), 'get_environment': ('name', ), + 'get_glossary': ('name', ), + 'get_glossary_category': ('name', ), + 'get_glossary_term': ('name', ), 'get_iam_policy': ('resource', 'options', ), 'get_job': ('name', ), 'get_lake': ('name', ), @@ -113,6 +125,9 @@ class dataplexCallTransformer(cst.CSTTransformer): 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_terms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_jobs': ('parent', 'page_size', 'page_token', ), 'list_lake_actions': ('parent', 'page_size', 'page_token', ), 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), @@ -141,6 +156,9 @@ class dataplexCallTransformer(cst.CSTTransformer): 'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), 'update_environment': ('update_mask', 'environment', 'validate_only', ), + 'update_glossary': ('glossary', 'update_mask', 'validate_only', ), + 'update_glossary_category': ('category', 'update_mask', ), + 'update_glossary_term': ('term', 'update_mask', ), 'update_lake': ('update_mask', 'lake', 'validate_only', ), 'update_task': ('update_mask', 'task', 'validate_only', ), 'update_zone': ('update_mask', 'zone', 'validate_only', ), diff --git a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py similarity index 71% rename from owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py rename to packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py index 17416fe5528c..5482cc9626ac 100644 --- a/owl-bot-staging/google-cloud-dataplex/v1/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py @@ -14,6 +14,7 @@ # limitations under the License. 
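The map entries above drive the libcst-based dataplexCallTransformer, which rewrites legacy positional call sites into keyword arguments in the order each tuple specifies. A hedged before/after sketch for one of the new glossary methods (the call site itself is illustrative):

# Before running fixup_dataplex_v1_keywords.py: positional arguments.
client.create_glossary(parent, glossary_id, glossary, validate_only)

# After: keywords, in the order given by the 'create_glossary' entry above.
client.create_glossary(
    parent=parent,
    glossary_id=glossary_id,
    glossary=glossary,
    validate_only=validate_only,
)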
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,58 +22,60 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataplex_v1.services.business_glossary_service import BusinessGlossaryServiceAsyncClient -from google.cloud.dataplex_v1.services.business_glossary_service import BusinessGlossaryServiceClient -from google.cloud.dataplex_v1.services.business_glossary_service import pagers -from google.cloud.dataplex_v1.services.business_glossary_service import transports -from google.cloud.dataplex_v1.types import business_glossary -from google.cloud.dataplex_v1.types import service from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - +from google.cloud.dataplex_v1.services.business_glossary_service import ( + BusinessGlossaryServiceAsyncClient, + BusinessGlossaryServiceClient, + pagers, + transports, +) +from google.cloud.dataplex_v1.types import business_glossary, service CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -87,9 +90,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of 
google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -97,17 +102,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -118,101 +133,245 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(None) is None - assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + def test__read_environment_variables(): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (True, "auto", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + 
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: BusinessGlossaryServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "never", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "always", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", None) + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: BusinessGlossaryServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BusinessGlossaryServiceClient._read_environment_variables() == (False, "auto", "foo.com") + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert BusinessGlossaryServiceClient._get_client_cert_source(None, False) is None - assert BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BusinessGlossaryServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BusinessGlossaryServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
BusinessGlossaryServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) -@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE - default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert BusinessGlossaryServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT - assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "always") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT - assert BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT - assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BusinessGlossaryServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - 
BusinessGlossaryServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert BusinessGlossaryServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BusinessGlossaryServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BusinessGlossaryServiceClient._get_universe_domain(None, None) == BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + assert ( + BusinessGlossaryServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BusinessGlossaryServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BusinessGlossaryServiceClient._get_universe_domain(None, None) + == BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: BusinessGlossaryServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -228,7 +387,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -241,14 +401,22 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (BusinessGlossaryServiceClient, "grpc"), - (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), - (BusinessGlossaryServiceClient, "rest"), -]) -def test_business_glossary_service_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), + ], +) +def test_business_glossary_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: 
+ with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -256,52 +424,70 @@ def test_business_glossary_service_client_from_service_account_info(client_class assert isinstance(client, client_class) assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BusinessGlossaryServiceGrpcTransport, "grpc"), - (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BusinessGlossaryServiceRestTransport, "rest"), -]) -def test_business_glossary_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BusinessGlossaryServiceGrpcTransport, "grpc"), + (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BusinessGlossaryServiceRestTransport, "rest"), + ], +) +def test_business_glossary_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (BusinessGlossaryServiceClient, "grpc"), - (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), - (BusinessGlossaryServiceClient, "rest"), -]) -def test_business_glossary_service_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), + ], +) +def test_business_glossary_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataplex.googleapis.com' + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" ) @@ -317,30 +503,53 @@ def test_business_glossary_service_client_get_transport_class(): assert transport == transports.BusinessGlossaryServiceGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc"), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest"), -]) -@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) -@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) -def test_business_glossary_service_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) +def test_business_glossary_service_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(BusinessGlossaryServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(BusinessGlossaryServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BusinessGlossaryServiceClient, 'get_transport_class') as gtc: + with mock.patch.object(BusinessGlossaryServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -358,13 +567,15 @@ def test_business_glossary_service_client_client_options(client_class, transport # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
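A hypothetical sketch of the option the surrounding cases exercise: an explicit ClientOptions.api_endpoint is honored verbatim, bypassing the endpoint template ("squid.clam.whelk" is the tests' placeholder host).

from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.dataplex_v1.services.business_glossary_service import (
    BusinessGlossaryServiceClient,
)

options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
client = BusinessGlossaryServiceClient(
    client_options=options,
    credentials=ga_credentials.AnonymousCredentials(),
)
# The transport now targets the override rather than dataplex.googleapis.com.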
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -376,7 +587,7 @@ def test_business_glossary_service_client_client_options(client_class, transport # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -396,23 +607,33 @@ def test_business_glossary_service_client_client_options(client_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -421,48 +642,102 @@ def test_business_glossary_service_client_client_options(client_class, transport api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) 
patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", "true"), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", "false"), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", "true"), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) -@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + "true", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + "false", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + "true", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_business_glossary_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_business_glossary_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -481,12 +756,22 @@ def test_business_glossary_service_client_mtls_env_auto(client_class, transport_ # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -507,15 +792,22 @@ def test_business_glossary_service_client_mtls_env_auto(client_class, transport_ ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -525,19 +817,33 @@ def test_business_glossary_service_client_mtls_env_auto(client_class, transport_ ) -@pytest.mark.parametrize("client_class", [ - BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient -]) -@mock.patch.object(BusinessGlossaryServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BusinessGlossaryServiceClient)) -@mock.patch.object(BusinessGlossaryServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BusinessGlossaryServiceAsyncClient)) -def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source(client_class): +@pytest.mark.parametrize( + "client_class", [BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient] +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessGlossaryServiceAsyncClient), +) +def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
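A hypothetical call mirroring the cases this test walks through: the classmethod resolves an endpoint and client-certificate source from ClientOptions plus the GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT environment variables.

from google.cloud.dataplex_v1.services.business_glossary_service import (
    BusinessGlossaryServiceClient,
)

endpoint, cert_source = (
    BusinessGlossaryServiceClient.get_mtls_endpoint_and_cert_source()
)
# With both variables unset, on a machine without a default client
# certificate: endpoint == DEFAULT_ENDPOINT and cert_source is None.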
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -545,8 +851,12 @@ def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source(clie with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -564,16 +874,28 @@ def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source(clie # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -583,34 +905,62 @@ def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source(clie with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient -]) -@mock.patch.object(BusinessGlossaryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceClient)) -@mock.patch.object(BusinessGlossaryServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient)) +@pytest.mark.parametrize( + "client_class", [BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient] +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) def test_business_glossary_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE - default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -633,11 +983,19 @@ def test_business_glossary_service_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
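For orientation while reading the mTLS hunks above: these tests assert a small decision matrix driven by two environment variables. The sketch below condenses that matrix under the documented semantics of GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT; the function name, signature, and default hosts are illustrative assumptions for this sketch, not code from this patch.

# Hypothetical sketch (not part of the diff): the endpoint-selection
# matrix exercised by the tests above.
import os

def select_endpoint(api_override=None, client_cert_source=None,
                    default_endpoint="dataplex.googleapis.com",
                    mtls_endpoint="dataplex.mtls.googleapis.com"):
    use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_cert not in ("true", "false"):
        # Mirrors the ValueError message asserted in the tests above.
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
            " either `true` or `false`"
        )
    if use_mtls not in ("never", "auto", "always"):
        # The real clients raise MutualTLSChannelError here; ValueError is
        # used in this sketch to keep it stdlib-only.
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be"
            " `never`, `auto` or `always`"
        )
    if api_override is not None:
        # An explicit ClientOptions.api_endpoint always wins.
        return api_override
    have_cert = use_cert == "true" and client_cert_source is not None
    if use_mtls == "always" or (use_mtls == "auto" and have_cert):
        return mtls_endpoint
    return default_endpoint

For example, with GOOGLE_API_USE_MTLS_ENDPOINT="auto", the mTLS endpoint is returned only when a client certificate source is both permitted and available, which is exactly the pair of "default cert doesn't exist" / "default cert exists" cases the tests above patch via google.auth.transport.mtls.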
@@ -645,27 +1003,48 @@ def test_business_glossary_service_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc"), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest"), -]) -def test_business_glossary_service_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + ), + ], +) +def test_business_glossary_service_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -674,24 +1053,45 @@ def test_business_glossary_service_client_client_options_scopes(client_class, tr api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", grpc_helpers), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceRestTransport, "rest", None), -]) -def test_business_glossary_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + None, + ), + ], +) +def test_business_glossary_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -700,11 +1100,14 @@ def test_business_glossary_service_client_client_options_credentials_file(client api_audience=None, ) + def test_business_glossary_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = BusinessGlossaryServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -719,23 +1122,38 @@ def test_business_glossary_service_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport, "grpc", grpc_helpers), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_business_glossary_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_business_glossary_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -762,9 +1180,7 @@ def test_business_glossary_service_client_create_channel_credentials_file(client credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="dataplex.googleapis.com", ssl_credentials=None, @@ -775,11 +1191,14 @@ def test_business_glossary_service_client_create_channel_credentials_file(client ) -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryRequest, - dict, -]) -def test_create_glossary(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryRequest, + dict, + ], +) +def test_create_glossary(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -790,11 +1209,9 @@ def test_create_glossary(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -812,30 +1229,31 @@ def test_create_glossary_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.CreateGlossaryRequest( - parent='parent_value', - glossary_id='glossary_id_value', + parent="parent_value", + glossary_id="glossary_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.CreateGlossaryRequest( - parent='parent_value', - glossary_id='glossary_id_value', + parent="parent_value", + glossary_id="glossary_id_value", ) + def test_create_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -854,7 +1272,9 @@ def test_create_glossary_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc request = {} client.create_glossary(request) @@ -873,8 +1293,11 @@ def test_create_glossary_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -888,12 +1311,17 @@ async def test_create_glossary_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_glossary in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_glossary + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_glossary] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary + ] = mock_rpc request = {} await client.create_glossary(request) @@ -912,8 +1340,12 @@ async def test_create_glossary_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryRequest): +async def test_create_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -924,12 +1356,10 @@ async def test_create_glossary_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_glossary(request) @@ -947,6 +1377,7 @@ async def test_create_glossary_async(transport: str = 'grpc_asyncio', request_ty async def test_create_glossary_async_from_dict(): await test_create_glossary_async(request_type=dict) + def test_create_glossary_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -956,13 +1387,11 @@ def test_create_glossary_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -973,9 +1402,9 @@ def test_create_glossary_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -988,13 +1417,13 @@ async def test_create_glossary_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1005,9 +1434,9 @@ async def test_create_glossary_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_glossary_flattened(): @@ -1016,17 +1445,15 @@ def test_create_glossary_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_glossary( - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) # Establish that the underlying call was made with the expected @@ -1034,13 +1461,13 @@ def test_create_glossary_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].glossary - mock_val = business_glossary.Glossary(name='name_value') + mock_val = business_glossary.Glossary(name="name_value") assert arg == mock_val arg = args[0].glossary_id - mock_val = 'glossary_id_value' + mock_val = "glossary_id_value" assert arg == mock_val @@ -1054,11 +1481,12 @@ def test_create_glossary_flattened_error(): with pytest.raises(ValueError): client.create_glossary( business_glossary.CreateGlossaryRequest(), - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) + @pytest.mark.asyncio async def test_create_glossary_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1066,21 +1494,19 @@ async def test_create_glossary_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_glossary( - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) # Establish that the underlying call was made with the expected @@ -1088,15 +1514,16 @@ async def test_create_glossary_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].glossary - mock_val = business_glossary.Glossary(name='name_value') + mock_val = business_glossary.Glossary(name="name_value") assert arg == mock_val arg = args[0].glossary_id - mock_val = 'glossary_id_value' + mock_val = "glossary_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_glossary_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1108,17 +1535,20 @@ async def test_create_glossary_flattened_error_async(): with pytest.raises(ValueError): await client.create_glossary( business_glossary.CreateGlossaryRequest(), - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryRequest, - dict, -]) -def test_update_glossary(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryRequest, + dict, + ], +) +def test_update_glossary(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1129,11 +1559,9 @@ def test_update_glossary(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1151,25 +1579,24 @@ def test_update_glossary_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = business_glossary.UpdateGlossaryRequest( - ) + request = business_glossary.UpdateGlossaryRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == business_glossary.UpdateGlossaryRequest( - ) + assert args[0] == business_glossary.UpdateGlossaryRequest() + def test_update_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1189,7 +1616,9 @@ def test_update_glossary_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc request = {} client.update_glossary(request) @@ -1208,8 +1637,11 @@ def test_update_glossary_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1223,12 +1655,17 @@ async def test_update_glossary_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_glossary in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_glossary + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_glossary] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary + ] = mock_rpc request = {} await client.update_glossary(request) @@ -1247,8 +1684,12 @@ async def test_update_glossary_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryRequest): +async def test_update_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1259,12 +1700,10 @@ async def test_update_glossary_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_glossary(request) @@ -1282,6 +1721,7 @@ async def test_update_glossary_async(transport: str = 'grpc_asyncio', request_ty async def test_update_glossary_async_from_dict(): await test_update_glossary_async(request_type=dict) + def test_update_glossary_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1291,13 +1731,11 @@ def test_update_glossary_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryRequest() - request.glossary.name = 'name_value' + request.glossary.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1308,9 +1746,9 @@ def test_update_glossary_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'glossary.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "glossary.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1323,13 +1761,13 @@ async def test_update_glossary_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryRequest() - request.glossary.name = 'name_value' + request.glossary.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1340,9 +1778,9 @@ async def test_update_glossary_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'glossary.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "glossary.name=name_value", + ) in kw["metadata"] def test_update_glossary_flattened(): @@ -1351,16 +1789,14 @@ def test_update_glossary_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_glossary( - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -1368,10 +1804,10 @@ def test_update_glossary_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].glossary - mock_val = business_glossary.Glossary(name='name_value') + mock_val = business_glossary.Glossary(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -1385,10 +1821,11 @@ def test_update_glossary_flattened_error(): with pytest.raises(ValueError): client.update_glossary( business_glossary.UpdateGlossaryRequest(), - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_glossary_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1396,20 +1833,18 @@ async def test_update_glossary_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_glossary( - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -1417,12 +1852,13 @@ async def test_update_glossary_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].glossary - mock_val = business_glossary.Glossary(name='name_value') + mock_val = business_glossary.Glossary(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_glossary_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1434,16 +1870,19 @@ async def test_update_glossary_flattened_error_async(): with pytest.raises(ValueError): await client.update_glossary( business_glossary.UpdateGlossaryRequest(), - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryRequest, - dict, -]) -def test_delete_glossary(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryRequest, + dict, + ], +) +def test_delete_glossary(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1454,11 +1893,9 @@ def test_delete_glossary(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1476,30 +1913,31 @@ def test_delete_glossary_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.DeleteGlossaryRequest( - name='name_value', - etag='etag_value', + name="name_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.DeleteGlossaryRequest( - name='name_value', - etag='etag_value', + name="name_value", + etag="etag_value", ) + def test_delete_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1518,7 +1956,9 @@ def test_delete_glossary_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc request = {} client.delete_glossary(request) @@ -1537,8 +1977,11 @@ def test_delete_glossary_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1552,12 +1995,17 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_glossary in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_glossary + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_glossary] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary + ] = mock_rpc request = {} await client.delete_glossary(request) @@ -1576,8 +2024,12 @@ async def test_delete_glossary_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryRequest): +async def test_delete_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1588,12 +2040,10 @@ async def test_delete_glossary_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_glossary(request) @@ -1611,6 +2061,7 @@ async def test_delete_glossary_async(transport: str = 'grpc_asyncio', request_ty async def test_delete_glossary_async_from_dict(): await test_delete_glossary_async(request_type=dict) + def test_delete_glossary_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1620,13 +2071,11 @@ def test_delete_glossary_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1637,9 +2086,9 @@ def test_delete_glossary_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1652,13 +2101,13 @@ async def test_delete_glossary_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1669,9 +2118,9 @@ async def test_delete_glossary_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_glossary_flattened(): @@ -1680,15 +2129,13 @@ def test_delete_glossary_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_glossary( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1696,7 +2143,7 @@ def test_delete_glossary_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -1710,9 +2157,10 @@ def test_delete_glossary_flattened_error(): with pytest.raises(ValueError): client.delete_glossary( business_glossary.DeleteGlossaryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_glossary_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1720,19 +2168,17 @@ async def test_delete_glossary_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_glossary( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1740,9 +2186,10 @@ async def test_delete_glossary_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_glossary_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -1754,15 +2201,18 @@ async def test_delete_glossary_flattened_error_async(): with pytest.raises(ValueError): await client.delete_glossary( business_glossary.DeleteGlossaryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryRequest, - dict, -]) -def test_get_glossary(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1773,18 +2223,16 @@ def test_get_glossary(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = business_glossary.Glossary( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", term_count=1088, category_count=1510, - etag='etag_value', + etag="etag_value", ) response = client.get_glossary(request) @@ -1796,13 +2244,13 @@ def test_get_glossary(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, business_glossary.Glossary) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" assert response.term_count == 1088 assert response.category_count == 1510 - assert response.etag == 'etag_value' + assert response.etag == "etag_value" def test_get_glossary_non_empty_request_with_auto_populated_field(): @@ -1810,28 +2258,29 @@ def test_get_glossary_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.GetGlossaryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_glossary(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.GetGlossaryRequest( - name='name_value', + name="name_value", ) + def test_get_glossary_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1850,7 +2299,9 @@ def test_get_glossary_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc request = {} client.get_glossary(request) @@ -1864,8 +2315,11 @@ def test_get_glossary_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1879,12 +2333,17 @@ async def test_get_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_glossary in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_glossary + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_glossary] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary + ] = mock_rpc request = {} await client.get_glossary(request) @@ -1898,8 +2357,11 @@ async def test_get_glossary_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryRequest): +async def test_get_glossary_async( + transport: str = "grpc_asyncio", request_type=business_glossary.GetGlossaryRequest +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1910,19 +2372,19 @@ async def test_get_glossary_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - term_count=1088, - category_count=1510, - etag='etag_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + ) response = await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -1933,19 +2395,20 @@ async def test_get_glossary_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.Glossary) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" assert response.term_count == 1088 assert response.category_count == 1510 - assert response.etag == 'etag_value' + assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_glossary_async_from_dict(): await test_get_glossary_async(request_type=dict) + def test_get_glossary_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1955,12 +2418,10 @@ def test_get_glossary_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: call.return_value = business_glossary.Glossary() client.get_glossary(request) @@ -1972,9 +2433,9 @@ def test_get_glossary_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1987,13 +2448,13 @@ async def test_get_glossary_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary()) + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary() + ) await client.get_glossary(request) # Establish that the underlying gRPC stub method was called. @@ -2004,9 +2465,9 @@ async def test_get_glossary_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_glossary_flattened(): @@ -2015,15 +2476,13 @@ def test_get_glossary_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.Glossary() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_glossary( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2031,7 +2490,7 @@ def test_get_glossary_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2045,9 +2504,10 @@ def test_get_glossary_flattened_error(): with pytest.raises(ValueError): client.get_glossary( business_glossary.GetGlossaryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_glossary_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2055,17 +2515,17 @@ async def test_get_glossary_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.Glossary() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_glossary( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2073,9 +2533,10 @@ async def test_get_glossary_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_glossary_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2087,15 +2548,18 @@ async def test_get_glossary_flattened_error_async(): with pytest.raises(ValueError): await client.get_glossary( business_glossary.GetGlossaryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossariesRequest, - dict, -]) -def test_list_glossaries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossariesRequest, + dict, + ], +) +def test_list_glossaries(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2106,13 +2570,11 @@ def test_list_glossaries(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossariesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) response = client.list_glossaries(request) @@ -2124,8 +2586,8 @@ def test_list_glossaries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
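# Assigning a sequence to `side_effect` makes the mocked RPC return the
# next response on each successive call, which simulates server-side
# pagination: the pager re-issues ListGlossaries with each
# next_page_token until a page arrives whose token is empty. The trailing
# RuntimeError is a guard; if the pager requested a fifth page, the test
# would fail loudly instead of looping. A minimal sketch of the consumer
# loop the pager supports (assuming the generated pager's iteration
# protocol):
#
#     for glossary in client.list_glossaries(request={}):
#         ...  # yields 3 + 0 + 1 + 2 = 6 Glossary items across four pages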
assert isinstance(response, pagers.ListGlossariesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] def test_list_glossaries_non_empty_request_with_auto_populated_field(): @@ -2133,34 +2595,35 @@ def test_list_glossaries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.ListGlossariesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_glossaries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.ListGlossariesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) + def test_list_glossaries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2179,7 +2642,9 @@ def test_list_glossaries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc request = {} client.list_glossaries(request) @@ -2193,8 +2658,11 @@ def test_list_glossaries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossaries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_glossaries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2208,12 +2676,17 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_glossaries in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_glossaries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_glossaries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_glossaries + ] = mock_rpc request = {} await client.list_glossaries(request) @@ -2227,8 +2700,12 @@ async def test_list_glossaries_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossaries_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossariesRequest): +async def test_list_glossaries_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossariesRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2239,14 +2716,14 @@ async def test_list_glossaries_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) response = await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. @@ -2257,14 +2734,15 @@ async def test_list_glossaries_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossariesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.asyncio async def test_list_glossaries_async_from_dict(): await test_list_glossaries_async(request_type=dict) + def test_list_glossaries_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2274,12 +2752,10 @@ def test_list_glossaries_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossariesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: call.return_value = business_glossary.ListGlossariesResponse() client.list_glossaries(request) @@ -2291,9 +2767,9 @@ def test_list_glossaries_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2306,13 +2782,13 @@ async def test_list_glossaries_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossariesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse()) + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse() + ) await client.list_glossaries(request) # Establish that the underlying gRPC stub method was called. @@ -2323,9 +2799,9 @@ async def test_list_glossaries_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_glossaries_flattened(): @@ -2334,15 +2810,13 @@ def test_list_glossaries_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossariesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_glossaries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2350,7 +2824,7 @@ def test_list_glossaries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2364,9 +2838,10 @@ def test_list_glossaries_flattened_error(): with pytest.raises(ValueError): client.list_glossaries( business_glossary.ListGlossariesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_glossaries_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2374,17 +2849,17 @@ async def test_list_glossaries_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossariesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_glossaries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2392,9 +2867,10 @@ async def test_list_glossaries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_glossaries_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2406,7 +2882,7 @@ async def test_list_glossaries_flattened_error_async(): with pytest.raises(ValueError): await client.list_glossaries( business_glossary.ListGlossariesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -2417,9 +2893,7 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( business_glossary.ListGlossariesResponse( @@ -2428,17 +2902,17 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): business_glossary.Glossary(), business_glossary.Glossary(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossariesResponse( glossaries=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossariesResponse( glossaries=[ business_glossary.Glossary(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossariesResponse( glossaries=[ @@ -2453,9 +2927,7 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_glossaries(request={}, retry=retry, timeout=timeout) @@ -2465,8 +2937,9 @@ def test_list_glossaries_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.Glossary) - for i in results) + assert all(isinstance(i, business_glossary.Glossary) for i in results) + + def test_list_glossaries_pages(transport_name: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2474,9 +2947,7 @@ def test_list_glossaries_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossariesResponse( @@ -2485,17 +2956,17 @@ def test_list_glossaries_pages(transport_name: str = "grpc"): business_glossary.Glossary(), business_glossary.Glossary(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossariesResponse( glossaries=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossariesResponse( glossaries=[ business_glossary.Glossary(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossariesResponse( glossaries=[ @@ -2506,9 +2977,10 @@ def test_list_glossaries_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_glossaries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_glossaries_async_pager(): client = BusinessGlossaryServiceAsyncClient( @@ -2517,8 +2989,8 @@ async def test_list_glossaries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossaries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
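# The async pager mirrors the sync variant but is consumed with
# `async for`; `new_callable=mock.AsyncMock` above makes each stub call
# awaitable. Assumed usage shape:
#
#     async_pager = await client.list_glossaries(request={})
#     async for glossary in async_pager:
#         ...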
call.side_effect = ( business_glossary.ListGlossariesResponse( @@ -2527,17 +2999,17 @@ async def test_list_glossaries_async_pager(): business_glossary.Glossary(), business_glossary.Glossary(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossariesResponse( glossaries=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossariesResponse( glossaries=[ business_glossary.Glossary(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossariesResponse( glossaries=[ @@ -2547,15 +3019,16 @@ async def test_list_glossaries_async_pager(): ), RuntimeError, ) - async_pager = await client.list_glossaries(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_glossaries( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, business_glossary.Glossary) - for i in responses) + assert all(isinstance(i, business_glossary.Glossary) for i in responses) @pytest.mark.asyncio @@ -2566,8 +3039,8 @@ async def test_list_glossaries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossaries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossariesResponse( @@ -2576,17 +3049,17 @@ async def test_list_glossaries_async_pages(): business_glossary.Glossary(), business_glossary.Glossary(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossariesResponse( glossaries=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossariesResponse( glossaries=[ business_glossary.Glossary(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossariesResponse( glossaries=[ @@ -2599,18 +3072,22 @@ async def test_list_glossaries_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_glossaries(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryCategoryRequest, - dict, -]) -def test_create_glossary_category(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryCategoryRequest, + dict, + ], +) +def test_create_glossary_category(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2622,15 +3099,15 @@ def test_create_glossary_category(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
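# The transport exposes each RPC as a gRPC multicallable instance, so the
# tests patch `__call__` on its *type* rather than replacing the
# attribute: `obj(request)` looks up `__call__` on type(obj), so patching
# the type intercepts the invocation while the transport object itself is
# left intact. The same idiom recurs throughout this file.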
with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.create_glossary_category(request) @@ -2642,11 +3119,11 @@ def test_create_glossary_category(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_create_glossary_category_non_empty_request_with_auto_populated_field(): @@ -2654,30 +3131,33 @@ def test_create_glossary_category_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.CreateGlossaryCategoryRequest( - parent='parent_value', - category_id='category_id_value', + parent="parent_value", + category_id="category_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_glossary_category(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.CreateGlossaryCategoryRequest( - parent='parent_value', - category_id='category_id_value', + parent="parent_value", + category_id="category_id_value", ) + def test_create_glossary_category_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2692,12 +3172,19 @@ def test_create_glossary_category_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.create_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_category + ] = mock_rpc request = {} client.create_glossary_category(request) @@ -2710,8 +3197,11 @@ def test_create_glossary_category_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2725,12 +3215,17 @@ async def test_create_glossary_category_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_glossary_category in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_glossary_category + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_glossary_category] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary_category + ] = mock_rpc request = {} await client.create_glossary_category(request) @@ -2744,8 +3239,12 @@ async def test_create_glossary_category_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryCategoryRequest): +async def test_create_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryCategoryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2757,16 +3256,18 @@ async def test_create_glossary_category_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.create_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -2777,17 +3278,18 @@ async def test_create_glossary_category_async(transport: str = 'grpc_asyncio', r # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_create_glossary_category_async_from_dict(): await test_create_glossary_category_async(request_type=dict) + def test_create_glossary_category_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2797,12 +3299,12 @@ def test_create_glossary_category_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryCategoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.create_glossary_category(request) @@ -2814,9 +3316,9 @@ def test_create_glossary_category_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2829,13 +3331,15 @@ async def test_create_glossary_category_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryCategoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) await client.create_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -2846,9 +3350,9 @@ async def test_create_glossary_category_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_glossary_category_flattened(): @@ -2858,16 +3362,16 @@ def test_create_glossary_category_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_glossary_category( - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) # Establish that the underlying call was made with the expected @@ -2875,13 +3379,13 @@ def test_create_glossary_category_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].category - mock_val = business_glossary.GlossaryCategory(name='name_value') + mock_val = business_glossary.GlossaryCategory(name="name_value") assert arg == mock_val arg = args[0].category_id - mock_val = 'category_id_value' + mock_val = "category_id_value" assert arg == mock_val @@ -2895,11 +3399,12 @@ def test_create_glossary_category_flattened_error(): with pytest.raises(ValueError): client.create_glossary_category( business_glossary.CreateGlossaryCategoryRequest(), - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) + @pytest.mark.asyncio async def test_create_glossary_category_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2908,18 +3413,20 @@ async def test_create_glossary_category_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_glossary_category( - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) # Establish that the underlying call was made with the expected @@ -2927,15 +3434,16 @@ async def test_create_glossary_category_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].category - mock_val = business_glossary.GlossaryCategory(name='name_value') + mock_val = business_glossary.GlossaryCategory(name="name_value") assert arg == mock_val arg = args[0].category_id - mock_val = 'category_id_value' + mock_val = "category_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_glossary_category_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -2947,17 +3455,20 @@ async def test_create_glossary_category_flattened_error_async(): with pytest.raises(ValueError): await client.create_glossary_category( business_glossary.CreateGlossaryCategoryRequest(), - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryCategoryRequest, - dict, -]) -def test_update_glossary_category(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryCategoryRequest, + dict, + ], +) +def test_update_glossary_category(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2969,15 +3480,15 @@ def test_update_glossary_category(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.update_glossary_category(request) @@ -2989,11 +3500,11 @@ def test_update_glossary_category(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_update_glossary_category_non_empty_request_with_auto_populated_field(): @@ -3001,25 +3512,26 @@ def test_update_glossary_category_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = business_glossary.UpdateGlossaryCategoryRequest( - ) + request = business_glossary.UpdateGlossaryCategoryRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_glossary_category(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == business_glossary.UpdateGlossaryCategoryRequest( - ) + assert args[0] == business_glossary.UpdateGlossaryCategoryRequest() + def test_update_glossary_category_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3035,12 +3547,19 @@ def test_update_glossary_category_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.update_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_glossary_category + ] = mock_rpc request = {} client.update_glossary_category(request) @@ -3053,8 +3572,11 @@ def test_update_glossary_category_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3068,12 +3590,17 @@ async def test_update_glossary_category_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_glossary_category in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_glossary_category + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_glossary_category] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary_category + ] = mock_rpc request = {} await client.update_glossary_category(request) @@ -3087,8 +3614,12 @@ async def test_update_glossary_category_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryCategoryRequest): +async def test_update_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3100,16 +3631,18 @@ async def test_update_glossary_category_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.update_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -3120,17 +3653,18 @@ async def test_update_glossary_category_async(transport: str = 'grpc_asyncio', r # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_update_glossary_category_async_from_dict(): await test_update_glossary_category_async(request_type=dict) + def test_update_glossary_category_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3140,12 +3674,12 @@ def test_update_glossary_category_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryCategoryRequest() - request.category.name = 'name_value' + request.category.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.update_glossary_category(request) @@ -3157,9 +3691,9 @@ def test_update_glossary_category_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'category.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "category.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3172,13 +3706,15 @@ async def test_update_glossary_category_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryCategoryRequest() - request.category.name = 'name_value' + request.category.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) await client.update_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -3189,9 +3725,9 @@ async def test_update_glossary_category_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'category.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "category.name=name_value", + ) in kw["metadata"] def test_update_glossary_category_flattened(): @@ -3201,15 +3737,15 @@ def test_update_glossary_category_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_glossary_category( - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -3217,10 +3753,10 @@ def test_update_glossary_category_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].category - mock_val = business_glossary.GlossaryCategory(name='name_value') + mock_val = business_glossary.GlossaryCategory(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -3234,10 +3770,11 @@ def test_update_glossary_category_flattened_error(): with pytest.raises(ValueError): client.update_glossary_category( business_glossary.UpdateGlossaryCategoryRequest(), - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_glossary_category_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3246,17 +3783,19 @@ async def test_update_glossary_category_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
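# For Update RPCs the flattened pair is (resource, update_mask): the
# FieldMask lists which fields of the GlossaryCategory the server should
# overwrite, following google.protobuf.FieldMask semantics. The
# "paths_value" below is only a test placeholder; a realistic call would
# look more like (hypothetical field choices):
#
#     await client.update_glossary_category(
#         category=business_glossary.GlossaryCategory(
#             name=..., description="new description"),
#         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
#     )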
response = await client.update_glossary_category( - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -3264,12 +3803,13 @@ async def test_update_glossary_category_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].category - mock_val = business_glossary.GlossaryCategory(name='name_value') + mock_val = business_glossary.GlossaryCategory(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_glossary_category_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3281,16 +3821,19 @@ async def test_update_glossary_category_flattened_error_async(): with pytest.raises(ValueError): await client.update_glossary_category( business_glossary.UpdateGlossaryCategoryRequest(), - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryCategoryRequest, - dict, -]) -def test_delete_glossary_category(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryCategoryRequest, + dict, + ], +) +def test_delete_glossary_category(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3302,8 +3845,8 @@ def test_delete_glossary_category(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_glossary_category(request) @@ -3323,28 +3866,31 @@ def test_delete_glossary_category_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.DeleteGlossaryCategoryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_glossary_category(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.DeleteGlossaryCategoryRequest( - name='name_value', + name="name_value", ) + def test_delete_glossary_category_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3359,12 +3905,19 @@ def test_delete_glossary_category_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.delete_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_category + ] = mock_rpc request = {} client.delete_glossary_category(request) @@ -3377,8 +3930,11 @@ def test_delete_glossary_category_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3392,12 +3948,17 @@ async def test_delete_glossary_category_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_glossary_category in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_glossary_category + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_glossary_category] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary_category + ] = mock_rpc request = {} await client.delete_glossary_category(request) @@ -3411,8 +3972,12 @@ async def test_delete_glossary_category_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryCategoryRequest): +async def test_delete_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3424,8 +3989,8 @@ async def test_delete_glossary_category_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. 
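# Async stubs must return an awaitable, so the async tests wrap the fake
# response in grpc_helpers_async.FakeUnaryUnaryCall, which resolves to the
# wrapped value when awaited. Here the wrapped value is None: as the sync
# variant's `call.return_value = None` suggests, DeleteGlossaryCategory
# returns an empty response, and the client surfaces it as None. Assumed
# shape:
#
#     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
#     assert await client.delete_glossary_category(request) is None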
with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_glossary_category(request) @@ -3444,6 +4009,7 @@ async def test_delete_glossary_category_async(transport: str = 'grpc_asyncio', r async def test_delete_glossary_category_async_from_dict(): await test_delete_glossary_category_async(request_type=dict) + def test_delete_glossary_category_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3453,12 +4019,12 @@ def test_delete_glossary_category_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryCategoryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: call.return_value = None client.delete_glossary_category(request) @@ -3470,9 +4036,9 @@ def test_delete_glossary_category_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3485,12 +4051,12 @@ async def test_delete_glossary_category_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryCategoryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_glossary_category(request) @@ -3502,9 +4068,9 @@ async def test_delete_glossary_category_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_glossary_category_flattened(): @@ -3514,14 +4080,14 @@ def test_delete_glossary_category_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_glossary_category( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3529,7 +4095,7 @@ def test_delete_glossary_category_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3543,9 +4109,10 @@ def test_delete_glossary_category_flattened_error(): with pytest.raises(ValueError): client.delete_glossary_category( business_glossary.DeleteGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_glossary_category_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3554,8 +4121,8 @@ async def test_delete_glossary_category_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3563,7 +4130,7 @@ async def test_delete_glossary_category_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_glossary_category( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3571,9 +4138,10 @@ async def test_delete_glossary_category_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_glossary_category_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3585,15 +4153,18 @@ async def test_delete_glossary_category_flattened_error_async(): with pytest.raises(ValueError): await client.delete_glossary_category( business_glossary.DeleteGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryCategoryRequest, - dict, -]) -def test_get_glossary_category(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryCategoryRequest, + dict, + ], +) +def test_get_glossary_category(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3605,15 +4176,15 @@ def test_get_glossary_category(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.get_glossary_category(request) @@ -3625,11 +4196,11 @@ def test_get_glossary_category(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
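The `*_flattened` and `*_flattened_error` tests above capture the GAPIC convention that convenience keyword arguments are only merged into an empty request: passing both a request object and flattened kwargs raises `ValueError`. A hedged sketch of that guard, not the generated implementation itself:

    # Minimal stand-in for the generated method's flattened-argument check.
    def delete_glossary_category(request=None, *, name=None):
        if request is not None and name is not None:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        return request or {"name": name}

    try:
        delete_glossary_category({"name": "a"}, name="b")
    except ValueError:
        pass  # mixing a request object with flattened kwargs is rejected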
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_get_glossary_category_non_empty_request_with_auto_populated_field(): @@ -3637,28 +4208,31 @@ def test_get_glossary_category_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.GetGlossaryCategoryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_glossary_category(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.GetGlossaryCategoryRequest( - name='name_value', + name="name_value", ) + def test_get_glossary_category_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3673,12 +4247,19 @@ def test_get_glossary_category_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.get_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_glossary_category + ] = mock_rpc request = {} client.get_glossary_category(request) @@ -3691,8 +4272,11 @@ def test_get_glossary_category_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_category_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3706,12 +4290,17 @@ async def test_get_glossary_category_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_glossary_category in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_glossary_category + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_glossary_category] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary_category + ] = mock_rpc request = {} await client.get_glossary_category(request) @@ -3725,8 +4314,12 @@ async def test_get_glossary_category_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_category_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryCategoryRequest): +async def test_get_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.GetGlossaryCategoryRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3738,16 +4331,18 @@ async def test_get_glossary_category_async(transport: str = 'grpc_asyncio', requ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.get_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -3758,17 +4353,18 @@ async def test_get_glossary_category_async(transport: str = 'grpc_asyncio', requ # Establish that the response is the type that we expect. 
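The async variant above swaps the cached wrapper for a `mock.AsyncMock`. Awaiting an `AsyncMock` resolves to its `return_value`, which is what lets the async client path run without a real channel; a self-contained illustration:

    import asyncio
    from unittest import mock

    async def main():
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        # Awaiting an AsyncMock yields its return_value and records the await.
        response = await mock_rpc(request={})
        assert response is mock_rpc.return_value
        assert mock_rpc.await_count == 1

    asyncio.run(main())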
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_get_glossary_category_async_from_dict(): await test_get_glossary_category_async(request_type=dict) + def test_get_glossary_category_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3778,12 +4374,12 @@ def test_get_glossary_category_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryCategoryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.get_glossary_category(request) @@ -3795,9 +4391,9 @@ def test_get_glossary_category_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3810,13 +4406,15 @@ async def test_get_glossary_category_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryCategoryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) await client.get_glossary_category(request) # Establish that the underlying gRPC stub method was called. @@ -3827,9 +4425,9 @@ async def test_get_glossary_category_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_glossary_category_flattened(): @@ -3839,14 +4437,14 @@ def test_get_glossary_category_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
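Every async stub in this file is faked with `grpc_helpers_async.FakeUnaryUnaryCall`, which wraps a canned response in an already-resolved awaitable, mimicking what a real async gRPC stub returns. A small sketch of that behavior (the string response is illustrative):

    import asyncio
    from google.api_core import grpc_helpers_async

    async def main():
        # Awaiting the fake call yields the response it was constructed with.
        call = grpc_helpers_async.FakeUnaryUnaryCall("canned-response")
        assert await call == "canned-response"

    asyncio.run(main())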
client.get_glossary_category( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3854,7 +4452,7 @@ def test_get_glossary_category_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3868,9 +4466,10 @@ def test_get_glossary_category_flattened_error(): with pytest.raises(ValueError): client.get_glossary_category( business_glossary.GetGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_glossary_category_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3879,16 +4478,18 @@ async def test_get_glossary_category_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryCategory() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_glossary_category( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3896,9 +4497,10 @@ async def test_get_glossary_category_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_glossary_category_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -3910,15 +4512,18 @@ async def test_get_glossary_category_flattened_error_async(): with pytest.raises(ValueError): await client.get_glossary_category( business_glossary.GetGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossaryCategoriesRequest, - dict, -]) -def test_list_glossary_categories(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryCategoriesRequest, + dict, + ], +) +def test_list_glossary_categories(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3930,12 +4535,12 @@ def test_list_glossary_categories(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Designate an appropriate return value for the call. 
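These tests consistently patch `type(client.transport.<method>)` rather than the stub instance, because dunder lookup for a call happens on the type. A minimal sketch of why that works, with `FakeMultiCallable` as a hypothetical stand-in for the gRPC multicallable:

    from unittest import mock

    class FakeMultiCallable:
        def __call__(self, request):
            raise RuntimeError("would hit the network")

    stub = FakeMultiCallable()
    # Patching __call__ on the *type* intercepts instance calls, so invoking
    # the stub hits the mock instead of the network.
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = "mocked"
        assert stub(request={}) == "mocked"
        call.assert_called_once()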
call.return_value = business_glossary.ListGlossaryCategoriesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) response = client.list_glossary_categories(request) @@ -3947,8 +4552,8 @@ def test_list_glossary_categories(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListGlossaryCategoriesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] def test_list_glossary_categories_non_empty_request_with_auto_populated_field(): @@ -3956,34 +4561,37 @@ def test_list_glossary_categories_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.ListGlossaryCategoriesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_glossary_categories(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.ListGlossaryCategoriesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) + def test_list_glossary_categories_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3998,12 +4606,19 @@ def test_list_glossary_categories_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossary_categories in client._transport._wrapped_methods + assert ( + client._transport.list_glossary_categories + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_glossary_categories] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_glossary_categories + ] = mock_rpc request = {} client.list_glossary_categories(request) @@ -4016,8 +4631,11 @@ def test_list_glossary_categories_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossary_categories_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_glossary_categories_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4031,12 +4649,17 @@ async def test_list_glossary_categories_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_glossary_categories in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_glossary_categories + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_glossary_categories] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_glossary_categories + ] = mock_rpc request = {} await client.list_glossary_categories(request) @@ -4050,8 +4673,12 @@ async def test_list_glossary_categories_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossary_categories_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossaryCategoriesRequest): +async def test_list_glossary_categories_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossaryCategoriesRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4063,13 +4690,15 @@ async def test_list_glossary_categories_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) response = await client.list_glossary_categories(request) # Establish that the underlying gRPC stub method was called. @@ -4080,14 +4709,15 @@ async def test_list_glossary_categories_async(transport: str = 'grpc_asyncio', r # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossaryCategoriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.asyncio async def test_list_glossary_categories_async_from_dict(): await test_list_glossary_categories_async(request_type=dict) + def test_list_glossary_categories_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4097,12 +4727,12 @@ def test_list_glossary_categories_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossaryCategoriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: call.return_value = business_glossary.ListGlossaryCategoriesResponse() client.list_glossary_categories(request) @@ -4114,9 +4744,9 @@ def test_list_glossary_categories_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4129,13 +4759,15 @@ async def test_list_glossary_categories_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossaryCategoriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse()) + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse() + ) await client.list_glossary_categories(request) # Establish that the underlying gRPC stub method was called. @@ -4146,9 +4778,9 @@ async def test_list_glossary_categories_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_glossary_categories_flattened(): @@ -4158,14 +4790,14 @@ def test_list_glossary_categories_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossaryCategoriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_glossary_categories( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4173,7 +4805,7 @@ def test_list_glossary_categories_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -4187,9 +4819,10 @@ def test_list_glossary_categories_flattened_error(): with pytest.raises(ValueError): client.list_glossary_categories( business_glossary.ListGlossaryCategoriesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_glossary_categories_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -4198,16 +4831,18 @@ async def test_list_glossary_categories_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossaryCategoriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_glossary_categories( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4215,9 +4850,10 @@ async def test_list_glossary_categories_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_glossary_categories_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -4229,7 +4865,7 @@ async def test_list_glossary_categories_flattened_error_async(): with pytest.raises(ValueError): await client.list_glossary_categories( business_glossary.ListGlossaryCategoriesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -4241,8 +4877,8 @@ def test_list_glossary_categories_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( business_glossary.ListGlossaryCategoriesResponse( @@ -4251,17 +4887,17 @@ def test_list_glossary_categories_pager(transport_name: str = "grpc"): business_glossary.GlossaryCategory(), business_glossary.GlossaryCategory(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryCategoriesResponse( categories=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ business_glossary.GlossaryCategory(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ @@ -4276,11 +4912,11 @@ def test_list_glossary_categories_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_glossary_categories( + request={}, retry=retry, timeout=timeout ) - pager = client.list_glossary_categories(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -4288,8 +4924,9 @@ def test_list_glossary_categories_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.GlossaryCategory) - for i in results) + assert all(isinstance(i, business_glossary.GlossaryCategory) for i in results) + + def test_list_glossary_categories_pages(transport_name: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4298,8 +4935,8 @@ def test_list_glossary_categories_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossaryCategoriesResponse( @@ -4308,17 +4945,17 @@ def test_list_glossary_categories_pages(transport_name: str = "grpc"): business_glossary.GlossaryCategory(), business_glossary.GlossaryCategory(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryCategoriesResponse( categories=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ business_glossary.GlossaryCategory(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ @@ -4329,9 +4966,10 @@ def test_list_glossary_categories_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_glossary_categories(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_glossary_categories_async_pager(): client = BusinessGlossaryServiceAsyncClient( @@ -4340,8 +4978,10 @@ async def test_list_glossary_categories_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. 
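The pager tests above feed four pages (tokens "abc", "def", "ghi", "") through `side_effect` and expect six flattened results. The pager re-issues the RPC with each `next_page_token` until it comes back empty; a toy model of that loop, with `Page` as a hypothetical stand-in for `ListGlossaryCategoriesResponse`:

    class Page:
        def __init__(self, items, token):
            self.items = items
            self.next_page_token = token

    pages = [
        Page(["a", "b", "c"], "abc"),
        Page([], "def"),
        Page(["d"], "ghi"),
        Page(["e", "f"], ""),  # an empty token ends the iteration
    ]

    def paginate(pages):
        # Yield items flat, stopping when a page carries no further token;
        # this is why list(pager) above returns 6 categories.
        for page in pages:
            yield from page.items
            if not page.next_page_token:
                break

    assert list(paginate(pages)) == ["a", "b", "c", "d", "e", "f"]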
with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossary_categories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossaryCategoriesResponse( @@ -4350,17 +4990,17 @@ async def test_list_glossary_categories_async_pager(): business_glossary.GlossaryCategory(), business_glossary.GlossaryCategory(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryCategoriesResponse( categories=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ business_glossary.GlossaryCategory(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ @@ -4370,15 +5010,16 @@ async def test_list_glossary_categories_async_pager(): ), RuntimeError, ) - async_pager = await client.list_glossary_categories(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_glossary_categories( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, business_glossary.GlossaryCategory) - for i in responses) + assert all(isinstance(i, business_glossary.GlossaryCategory) for i in responses) @pytest.mark.asyncio @@ -4389,8 +5030,10 @@ async def test_list_glossary_categories_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossary_categories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
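The async pager exercised here exposes the same flattening through `async for`, which is also why the `# pragma: no branch` markers appear on those loops. A minimal async-generator sketch of the iteration shape:

    import asyncio

    async def apaginate(pages):
        # Flatten pages of items, as the AsyncPager does for responses.
        for items in pages:
            for item in items:
                yield item

    async def main():
        results = [i async for i in apaginate([["a", "b"], [], ["c"]])]
        assert results == ["a", "b", "c"]

    asyncio.run(main())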
call.side_effect = ( business_glossary.ListGlossaryCategoriesResponse( @@ -4399,17 +5042,17 @@ async def test_list_glossary_categories_async_pages(): business_glossary.GlossaryCategory(), business_glossary.GlossaryCategory(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryCategoriesResponse( categories=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ business_glossary.GlossaryCategory(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ @@ -4422,18 +5065,22 @@ async def test_list_glossary_categories_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_glossary_categories(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryTermRequest, - dict, -]) -def test_create_glossary_term(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryTermRequest, + dict, + ], +) +def test_create_glossary_term(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4445,15 +5092,15 @@ def test_create_glossary_term(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.create_glossary_term(request) @@ -4465,11 +5112,11 @@ def test_create_glossary_term(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_create_glossary_term_non_empty_request_with_auto_populated_field(): @@ -4477,30 +5124,33 @@ def test_create_glossary_term_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
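The `*_non_empty_request_with_auto_populated_field` tests reference AIP-4235, under which UUID4-style fields left unset by the caller are filled in client-side. A hedged illustration of that idea; the `request_id` field name is purely illustrative, and the Dataplex requests in these tests appear to have no such field, which is why they only assert that the explicitly set strings survive round-trip:

    import uuid

    def auto_populate(request: dict) -> dict:
        # Fill a UUID4 request_id only when the caller left it empty.
        if not request.get("request_id"):
            request["request_id"] = str(uuid.uuid4())
        return request

    populated = auto_populate({"parent": "parent_value"})
    assert uuid.UUID(populated["request_id"]).version == 4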
client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.CreateGlossaryTermRequest( - parent='parent_value', - term_id='term_id_value', + parent="parent_value", + term_id="term_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_glossary_term(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.CreateGlossaryTermRequest( - parent='parent_value', - term_id='term_id_value', + parent="parent_value", + term_id="term_id_value", ) + def test_create_glossary_term_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4515,12 +5165,18 @@ def test_create_glossary_term_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.create_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_glossary_term + ] = mock_rpc request = {} client.create_glossary_term(request) @@ -4533,8 +5189,11 @@ def test_create_glossary_term_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4548,12 +5207,17 @@ async def test_create_glossary_term_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_glossary_term in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_glossary_term + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_glossary_term] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary_term + ] = mock_rpc request = {} await client.create_glossary_term(request) @@ -4567,8 +5231,12 @@ async def test_create_glossary_term_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.CreateGlossaryTermRequest): +async def test_create_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryTermRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4580,16 +5248,18 @@ async def test_create_glossary_term_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.create_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -4600,17 +5270,18 @@ async def test_create_glossary_term_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_create_glossary_term_async_from_dict(): await test_create_glossary_term_async(request_type=dict) + def test_create_glossary_term_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4620,12 +5291,12 @@ def test_create_glossary_term_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryTermRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.create_glossary_term(request) @@ -4637,9 +5308,9 @@ def test_create_glossary_term_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4652,13 +5323,15 @@ async def test_create_glossary_term_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.CreateGlossaryTermRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) await client.create_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -4669,9 +5342,9 @@ async def test_create_glossary_term_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_glossary_term_flattened(): @@ -4681,16 +5354,16 @@ def test_create_glossary_term_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_glossary_term( - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) # Establish that the underlying call was made with the expected @@ -4698,13 +5371,13 @@ def test_create_glossary_term_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].term - mock_val = business_glossary.GlossaryTerm(name='name_value') + mock_val = business_glossary.GlossaryTerm(name="name_value") assert arg == mock_val arg = args[0].term_id - mock_val = 'term_id_value' + mock_val = "term_id_value" assert arg == mock_val @@ -4718,11 +5391,12 @@ def test_create_glossary_term_flattened_error(): with pytest.raises(ValueError): client.create_glossary_term( business_glossary.CreateGlossaryTermRequest(), - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) + @pytest.mark.asyncio async def test_create_glossary_term_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -4731,18 +5405,20 @@ async def test_create_glossary_term_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_glossary_term( - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) # Establish that the underlying call was made with the expected @@ -4750,15 +5426,16 @@ async def test_create_glossary_term_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].term - mock_val = business_glossary.GlossaryTerm(name='name_value') + mock_val = business_glossary.GlossaryTerm(name="name_value") assert arg == mock_val arg = args[0].term_id - mock_val = 'term_id_value' + mock_val = "term_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_glossary_term_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -4770,17 +5447,20 @@ async def test_create_glossary_term_flattened_error_async(): with pytest.raises(ValueError): await client.create_glossary_term( business_glossary.CreateGlossaryTermRequest(), - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryTermRequest, - dict, -]) -def test_update_glossary_term(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryTermRequest, + dict, + ], +) +def test_update_glossary_term(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4792,15 +5472,15 @@ def test_update_glossary_term(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.update_glossary_term(request) @@ -4812,11 +5492,11 @@ def test_update_glossary_term(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_update_glossary_term_non_empty_request_with_auto_populated_field(): @@ -4824,25 +5504,26 @@ def test_update_glossary_term_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = business_glossary.UpdateGlossaryTermRequest( - ) + request = business_glossary.UpdateGlossaryTermRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_glossary_term(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == business_glossary.UpdateGlossaryTermRequest( - ) + assert args[0] == business_glossary.UpdateGlossaryTermRequest() + def test_update_glossary_term_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4858,12 +5539,18 @@ def test_update_glossary_term_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.update_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_glossary_term + ] = mock_rpc request = {} client.update_glossary_term(request) @@ -4876,8 +5563,11 @@ def test_update_glossary_term_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4891,12 +5581,17 @@ async def test_update_glossary_term_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_glossary_term in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_glossary_term + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_glossary_term] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary_term + ] = mock_rpc request = {} await client.update_glossary_term(request) @@ -4910,8 +5605,12 @@ async def test_update_glossary_term_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.UpdateGlossaryTermRequest): +async def test_update_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryTermRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4923,16 +5622,18 @@ async def test_update_glossary_term_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.update_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -4943,17 +5644,18 @@ async def test_update_glossary_term_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_update_glossary_term_async_from_dict(): await test_update_glossary_term_async(request_type=dict) + def test_update_glossary_term_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4963,12 +5665,12 @@ def test_update_glossary_term_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryTermRequest() - request.term.name = 'name_value' + request.term.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.update_glossary_term(request) @@ -4980,9 +5682,9 @@ def test_update_glossary_term_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'term.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "term.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4995,13 +5697,15 @@ async def test_update_glossary_term_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.UpdateGlossaryTermRequest() - request.term.name = 'name_value' + request.term.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) await client.update_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -5012,9 +5716,9 @@ async def test_update_glossary_term_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'term.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "term.name=name_value", + ) in kw["metadata"] def test_update_glossary_term_flattened(): @@ -5024,15 +5728,15 @@ def test_update_glossary_term_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_glossary_term( - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -5040,10 +5744,10 @@ def test_update_glossary_term_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].term - mock_val = business_glossary.GlossaryTerm(name='name_value') + mock_val = business_glossary.GlossaryTerm(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -5057,10 +5761,11 @@ def test_update_glossary_term_flattened_error(): with pytest.raises(ValueError): client.update_glossary_term( business_glossary.UpdateGlossaryTermRequest(), - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_glossary_term_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5069,17 +5774,19 @@ async def test_update_glossary_term_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
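# The flattened calls above and below rely on a contract that the
# "*_flattened_error" tests pin down: a method accepts either a request
# object or flattened keyword arguments, never both. A toy sketch of that
# rule (names illustrative, not the real client):
class _Request:
    def __init__(self, name=""):
        self.name = name


def _update_glossary_term(request=None, *, term=None, update_mask=None):
    if request is not None and any(v is not None for v in (term, update_mask)):
        raise ValueError("Cannot pass both a request object and flattened fields.")
    return request if request is not None else _Request()


_update_glossary_term(term=object(), update_mask=object())  # flattened style: OK
try:
    _update_glossary_term(_Request(), term=object())  # both styles: rejected
except ValueError:
    pass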
response = await client.update_glossary_term( - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -5087,12 +5794,13 @@ async def test_update_glossary_term_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].term - mock_val = business_glossary.GlossaryTerm(name='name_value') + mock_val = business_glossary.GlossaryTerm(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_glossary_term_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5104,16 +5812,19 @@ async def test_update_glossary_term_flattened_error_async(): with pytest.raises(ValueError): await client.update_glossary_term( business_glossary.UpdateGlossaryTermRequest(), - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryTermRequest, - dict, -]) -def test_delete_glossary_term(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryTermRequest, + dict, + ], +) +def test_delete_glossary_term(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5125,8 +5836,8 @@ def test_delete_glossary_term(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_glossary_term(request) @@ -5146,28 +5857,31 @@ def test_delete_glossary_term_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.DeleteGlossaryTermRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_glossary_term(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.DeleteGlossaryTermRequest( - name='name_value', + name="name_value", ) + def test_delete_glossary_term_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5182,12 +5896,18 @@ def test_delete_glossary_term_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.delete_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_term + ] = mock_rpc request = {} client.delete_glossary_term(request) @@ -5200,8 +5920,11 @@ def test_delete_glossary_term_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5215,12 +5938,17 @@ async def test_delete_glossary_term_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_glossary_term in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_glossary_term + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_glossary_term] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary_term + ] = mock_rpc request = {} await client.delete_glossary_term(request) @@ -5234,8 +5962,12 @@ async def test_delete_glossary_term_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.DeleteGlossaryTermRequest): +async def test_delete_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryTermRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5247,8 +5979,8 @@ async def test_delete_glossary_term_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. 
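# Hedged sketch of the AIP-4235 behaviour the "auto_populated_field" tests
# target: string fields annotated as auto-populated UUID4 request IDs are
# filled with a fresh UUID only when the caller leaves them empty, while
# explicitly set values are preserved. The "request_id" field name below is
# hypothetical, chosen only to illustrate the rule.
import uuid


def auto_populate(request: dict, uuid4_fields=("request_id",)) -> dict:
    for field in uuid4_fields:
        if not request.get(field):
            request[field] = str(uuid.uuid4())  # populate only when empty
    return request


req = auto_populate({"name": "name_value", "request_id": ""})
assert req["name"] == "name_value" and req["request_id"]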
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_glossary_term(request) @@ -5267,6 +5999,7 @@ async def test_delete_glossary_term_async(transport: str = 'grpc_asyncio', reque async def test_delete_glossary_term_async_from_dict(): await test_delete_glossary_term_async(request_type=dict) + def test_delete_glossary_term_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5276,12 +6009,12 @@ def test_delete_glossary_term_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryTermRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: call.return_value = None client.delete_glossary_term(request) @@ -5293,9 +6026,9 @@ def test_delete_glossary_term_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5308,12 +6041,12 @@ async def test_delete_glossary_term_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.DeleteGlossaryTermRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_glossary_term(request) @@ -5325,9 +6058,9 @@ async def test_delete_glossary_term_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_glossary_term_flattened(): @@ -5337,14 +6070,14 @@ def test_delete_glossary_term_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
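# Aside on the grpc_helpers_async.FakeUnaryUnaryCall wrapping used throughout
# the async tests: the stubbed method must hand back something awaitable. A
# minimal stand-in (an assumption about the helper, not its real code) shows
# the shape: an object whose __await__ delivers the canned response.
import asyncio


class _FakeUnaryUnaryCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _deliver():
            return self._response

        return _deliver().__await__()


async def _demo():
    assert await _FakeUnaryUnaryCall("glossary-term") == "glossary-term"


asyncio.run(_demo())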
client.delete_glossary_term( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5352,7 +6085,7 @@ def test_delete_glossary_term_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -5366,9 +6099,10 @@ def test_delete_glossary_term_flattened_error(): with pytest.raises(ValueError): client.delete_glossary_term( business_glossary.DeleteGlossaryTermRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_glossary_term_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5377,8 +6111,8 @@ async def test_delete_glossary_term_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5386,7 +6120,7 @@ async def test_delete_glossary_term_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_glossary_term( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5394,9 +6128,10 @@ async def test_delete_glossary_term_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_glossary_term_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5408,15 +6143,18 @@ async def test_delete_glossary_term_flattened_error_async(): with pytest.raises(ValueError): await client.delete_glossary_term( business_glossary.DeleteGlossaryTermRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryTermRequest, - dict, -]) -def test_get_glossary_term(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryTermRequest, + dict, + ], +) +def test_get_glossary_term(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5428,15 +6166,15 @@ def test_get_glossary_term(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) response = client.get_glossary_term(request) @@ -5448,11 +6186,11 @@ def test_get_glossary_term(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" def test_get_glossary_term_non_empty_request_with_auto_populated_field(): @@ -5460,28 +6198,31 @@ def test_get_glossary_term_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.GetGlossaryTermRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_glossary_term(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.GetGlossaryTermRequest( - name='name_value', + name="name_value", ) + def test_get_glossary_term_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5500,8 +6241,12 @@ def test_get_glossary_term_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_glossary_term + ] = mock_rpc request = {} client.get_glossary_term(request) @@ -5514,8 +6259,11 @@ def test_get_glossary_term_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_term_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5529,12 +6277,17 @@ async def test_get_glossary_term_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_glossary_term in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_glossary_term + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_glossary_term] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary_term + ] = mock_rpc request = {} await client.get_glossary_term(request) @@ -5548,8 +6301,12 @@ async def test_get_glossary_term_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_glossary_term_async(transport: str = 'grpc_asyncio', request_type=business_glossary.GetGlossaryTermRequest): +async def test_get_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.GetGlossaryTermRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5561,16 +6318,18 @@ async def test_get_glossary_term_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) response = await client.get_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -5581,17 +6340,18 @@ async def test_get_glossary_term_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.asyncio async def test_get_glossary_term_async_from_dict(): await test_get_glossary_term_async(request_type=dict) + def test_get_glossary_term_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5601,12 +6361,12 @@ def test_get_glossary_term_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryTermRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.get_glossary_term(request) @@ -5618,9 +6378,9 @@ def test_get_glossary_term_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5633,13 +6393,15 @@ async def test_get_glossary_term_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.GetGlossaryTermRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) await client.get_glossary_term(request) # Establish that the underlying gRPC stub method was called. @@ -5650,9 +6412,9 @@ async def test_get_glossary_term_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_glossary_term_flattened(): @@ -5662,14 +6424,14 @@ def test_get_glossary_term_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_glossary_term( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5677,7 +6439,7 @@ def test_get_glossary_term_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -5691,9 +6453,10 @@ def test_get_glossary_term_flattened_error(): with pytest.raises(ValueError): client.get_glossary_term( business_glossary.GetGlossaryTermRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_glossary_term_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5702,16 +6465,18 @@ async def test_get_glossary_term_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.GlossaryTerm() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_glossary_term( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5719,9 +6484,10 @@ async def test_get_glossary_term_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_glossary_term_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -5733,15 +6499,18 @@ async def test_get_glossary_term_flattened_error_async(): with pytest.raises(ValueError): await client.get_glossary_term( business_glossary.GetGlossaryTermRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossaryTermsRequest, - dict, -]) -def test_list_glossary_terms(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryTermsRequest, + dict, + ], +) +def test_list_glossary_terms(request_type, transport: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5753,12 +6522,12 @@ def test_list_glossary_terms(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossaryTermsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) response = client.list_glossary_terms(request) @@ -5770,8 +6539,8 @@ def test_list_glossary_terms(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossaryTermsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] def test_list_glossary_terms_non_empty_request_with_auto_populated_field(): @@ -5779,34 +6548,37 @@ def test_list_glossary_terms_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = business_glossary.ListGlossaryTermsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_glossary_terms(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == business_glossary.ListGlossaryTermsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) + def test_list_glossary_terms_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5821,12 +6593,18 @@ def test_list_glossary_terms_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossary_terms in client._transport._wrapped_methods + assert ( + client._transport.list_glossary_terms in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_glossary_terms] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_glossary_terms + ] = mock_rpc request = {} client.list_glossary_terms(request) @@ -5839,8 +6617,11 @@ def test_list_glossary_terms_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossary_terms_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_glossary_terms_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5854,12 +6635,17 @@ async def test_list_glossary_terms_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_glossary_terms in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_glossary_terms + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_glossary_terms] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_glossary_terms + ] = mock_rpc request = {} await client.list_glossary_terms(request) @@ -5873,8 +6659,12 @@ async def test_list_glossary_terms_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_glossary_terms_async(transport: str = 'grpc_asyncio', request_type=business_glossary.ListGlossaryTermsRequest): +async def test_list_glossary_terms_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossaryTermsRequest, +): client = BusinessGlossaryServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5886,13 +6676,15 @@ async def test_list_glossary_terms_async(transport: str = 'grpc_asyncio', reques # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) response = await client.list_glossary_terms(request) # Establish that the underlying gRPC stub method was called. @@ -5903,14 +6695,15 @@ async def test_list_glossary_terms_async(transport: str = 'grpc_asyncio', reques # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossaryTermsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.asyncio async def test_list_glossary_terms_async_from_dict(): await test_list_glossary_terms_async(request_type=dict) + def test_list_glossary_terms_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5920,12 +6713,12 @@ def test_list_glossary_terms_field_headers(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossaryTermsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: call.return_value = business_glossary.ListGlossaryTermsResponse() client.list_glossary_terms(request) @@ -5937,9 +6730,9 @@ def test_list_glossary_terms_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5952,13 +6745,15 @@ async def test_list_glossary_terms_field_headers_async(): # a field header. Set these to a non-empty value. request = business_glossary.ListGlossaryTermsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse()) + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse() + ) await client.list_glossary_terms(request) # Establish that the underlying gRPC stub method was called. @@ -5969,9 +6764,9 @@ async def test_list_glossary_terms_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_glossary_terms_flattened(): @@ -5981,14 +6776,14 @@ def test_list_glossary_terms_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossaryTermsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_glossary_terms( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -5996,7 +6791,7 @@ def test_list_glossary_terms_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -6010,9 +6805,10 @@ def test_list_glossary_terms_flattened_error(): with pytest.raises(ValueError): client.list_glossary_terms( business_glossary.ListGlossaryTermsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_glossary_terms_flattened_async(): client = BusinessGlossaryServiceAsyncClient( @@ -6021,16 +6817,18 @@ async def test_list_glossary_terms_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = business_glossary.ListGlossaryTermsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_glossary_terms( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6038,9 +6836,10 @@ async def test_list_glossary_terms_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_glossary_terms_flattened_error_async(): client = BusinessGlossaryServiceAsyncClient( @@ -6052,7 +6851,7 @@ async def test_list_glossary_terms_flattened_error_async(): with pytest.raises(ValueError): await client.list_glossary_terms( business_glossary.ListGlossaryTermsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -6064,8 +6863,8 @@ def test_list_glossary_terms_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( business_glossary.ListGlossaryTermsResponse( @@ -6074,17 +6873,17 @@ def test_list_glossary_terms_pager(transport_name: str = "grpc"): business_glossary.GlossaryTerm(), business_glossary.GlossaryTerm(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryTermsResponse( terms=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryTermsResponse( terms=[ business_glossary.GlossaryTerm(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryTermsResponse( terms=[ @@ -6099,9 +6898,7 @@ def test_list_glossary_terms_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_glossary_terms(request={}, retry=retry, timeout=timeout) @@ -6111,8 +6908,9 @@ def test_list_glossary_terms_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.GlossaryTerm) - for i in results) + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in results) + + def test_list_glossary_terms_pages(transport_name: str = "grpc"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6121,8 +6919,8 @@ def test_list_glossary_terms_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossaryTermsResponse( @@ -6131,17 +6929,17 @@ def test_list_glossary_terms_pages(transport_name: str = "grpc"): business_glossary.GlossaryTerm(), business_glossary.GlossaryTerm(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryTermsResponse( terms=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryTermsResponse( terms=[ business_glossary.GlossaryTerm(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryTermsResponse( terms=[ @@ -6152,9 +6950,10 @@ def test_list_glossary_terms_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_glossary_terms(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_glossary_terms_async_pager(): client = BusinessGlossaryServiceAsyncClient( @@ -6163,8 +6962,10 @@ async def test_list_glossary_terms_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossary_terms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
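# The pager tests above and below all verify the same page-token protocol:
# each response carries a next_page_token, and iteration stops once the token
# is empty. A self-contained sketch of that loop (simplified; not the real
# pagers.ListGlossaryTermsPager), mirroring the abc/def/ghi/"" fixture:
class _Page:
    def __init__(self, items, next_page_token=""):
        self.items = items
        self.next_page_token = next_page_token


def iterate_pages(fetch, token=""):
    while True:
        page = fetch(token)
        yield page
        token = page.next_page_token
        if not token:  # empty token means the final page
            return


pages = {
    "": _Page(["t1", "t2", "t3"], "abc"),
    "abc": _Page([], "def"),
    "def": _Page(["t4"], "ghi"),
    "ghi": _Page(["t5", "t6"]),
}
got = list(iterate_pages(lambda token: pages[token]))
assert [p.next_page_token for p in got] == ["abc", "def", "ghi", ""]
assert sum(len(p.items) for p in got) == 6  # the six terms the tests count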
call.side_effect = ( business_glossary.ListGlossaryTermsResponse( @@ -6173,17 +6974,17 @@ async def test_list_glossary_terms_async_pager(): business_glossary.GlossaryTerm(), business_glossary.GlossaryTerm(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryTermsResponse( terms=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryTermsResponse( terms=[ business_glossary.GlossaryTerm(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryTermsResponse( terms=[ @@ -6193,15 +6994,16 @@ async def test_list_glossary_terms_async_pager(): ), RuntimeError, ) - async_pager = await client.list_glossary_terms(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_glossary_terms( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, business_glossary.GlossaryTerm) - for i in responses) + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in responses) @pytest.mark.asyncio @@ -6212,8 +7014,10 @@ async def test_list_glossary_terms_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_glossary_terms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( business_glossary.ListGlossaryTermsResponse( @@ -6222,17 +7026,17 @@ async def test_list_glossary_terms_async_pages(): business_glossary.GlossaryTerm(), business_glossary.GlossaryTerm(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryTermsResponse( terms=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryTermsResponse( terms=[ business_glossary.GlossaryTerm(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryTermsResponse( terms=[ @@ -6245,11 +7049,11 @@ async def test_list_glossary_terms_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_glossary_terms(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6271,7 +7075,9 @@ def test_create_glossary_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc request = {} @@ -6291,7 +7097,9 @@ def test_create_glossary_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_glossary_rest_required_fields(request_type=business_glossary.CreateGlossaryRequest): +def test_create_glossary_rest_required_fields( + request_type=business_glossary.CreateGlossaryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} @@ -6299,65 +7107,73 @@ def test_create_glossary_rest_required_fields(request_type=business_glossary.Cre request_init["glossary_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "glossaryId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "glossaryId" in jsonified_request assert jsonified_request["glossaryId"] == request_init["glossary_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["glossaryId"] = 'glossary_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["glossaryId"] = "glossary_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("glossary_id", "validate_only", )) + assert not set(unset_fields) - set( + ( + "glossary_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "glossaryId" in jsonified_request - assert jsonified_request["glossaryId"] == 'glossary_id_value' + assert jsonified_request["glossaryId"] == "glossary_id_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6368,17 +7184,33 @@ def test_create_glossary_rest_required_fields(request_type=business_glossary.Cre "glossaryId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_glossary_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(("glossaryId", "validateOnly", )) & set(("parent", "glossaryId", "glossary", ))) + assert set(unset_fields) == ( + set( + ( + "glossaryId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "glossaryId", + "glossary", + ) + ) + ) def test_create_glossary_rest_flattened(): @@ -6388,18 +7220,18 @@ def test_create_glossary_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) mock_args.update(sample_request) @@ -6407,7 +7239,7 @@ def test_create_glossary_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6417,10 +7249,13 @@ def test_create_glossary_rest_flattened(): # request object values. 
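# A hedged reconstruction of what the "_get_unset_required_fields" assertions
# amount to; the helper below is an assumption about the semantics, not the
# generated implementation. Required fields left unset by the caller are
# reported, and the test's set intersection keeps only those required fields
# that may legitimately start out unset (defaults / query params).
def get_unset_required_fields(required, request):
    return {field for field in required if not request.get(field)}


required = {"parent", "glossaryId", "glossary"}
assert get_unset_required_fields(required, {"parent": "parent_value"}) == {
    "glossaryId",
    "glossary",
}
# Mirrors: set(("glossaryId", "validateOnly")) & set(("parent", "glossaryId",
# "glossary")) — only "glossaryId" survives the intersection.
assert {"glossaryId", "validateOnly"} & required == {"glossaryId"}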
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) -def test_create_glossary_rest_flattened_error(transport: str = 'rest'): +def test_create_glossary_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6431,9 +7266,9 @@ def test_create_glossary_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_glossary( business_glossary.CreateGlossaryRequest(), - parent='parent_value', - glossary=business_glossary.Glossary(name='name_value'), - glossary_id='glossary_id_value', + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", ) @@ -6455,7 +7290,9 @@ def test_update_glossary_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc request = {} @@ -6475,78 +7312,101 @@ def test_update_glossary_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_glossary_rest_required_fields(request_type=business_glossary.UpdateGlossaryRequest): +def test_update_glossary_rest_required_fields( + request_type=business_glossary.UpdateGlossaryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
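# Sketch of the URI check behind path_template.validate in the flattened REST
# tests: each "*" matches exactly one path segment. This is a simplified
# matcher under that single assumption (the real validator also handles the
# "{field=pattern}" bindings, which are elided here); host is a placeholder.
import re


def validate(template: str, uri: str) -> bool:
    pattern = re.escape(template).replace(r"\*", r"[^/]+")
    return re.fullmatch(pattern, uri) is not None


host = "https://dataplex.example"  # placeholder host for the sketch
template = "%s/v1/projects/*/locations/*/glossaries" % host
assert validate(template, host + "/v1/projects/sample1/locations/sample2/glossaries")
# A "*" cannot span two segments, so this URI does not satisfy the rule:
assert not validate(template, host + "/v1/projects/a/b/locations/c/glossaries")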
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_glossary_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("glossary", "updateMask", ))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "glossary", + "updateMask", + ) + ) + ) def test_update_glossary_rest_flattened(): @@ -6556,17 +7416,21 @@ def test_update_glossary_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
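# Why transcode() is mocked in the REST required-fields tests: transcoding
# maps a request message onto an HTTP rule, producing the URI, verb, body
# field, and leftover query params. Stubbing it lets placeholder field values
# bypass the real path patterns. A toy version under those assumptions (not
# google.api_core's implementation):
def transcode(http_rule, request):
    body_field = http_rule.get("body")
    body = request.get(body_field) if body_field else None
    query = {k: v for k, v in request.items() if k != body_field}
    return {
        "uri": http_rule["uri"],
        "method": http_rule["method"],
        "query_params": query,
        "body": body,
    }


rule = {"uri": "v1/sample_method", "method": "patch", "body": "glossary"}
out = transcode(rule, {"glossary": {"name": "g"}, "updateMask": "displayName"})
assert out["method"] == "patch" and out["body"] == {"name": "g"}
assert out["query_params"] == {"updateMask": "displayName"}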
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} + sample_request = { + "glossary": { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -6574,7 +7438,7 @@ def test_update_glossary_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6584,10 +7448,14 @@ def test_update_glossary_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{glossary.name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{glossary.name=projects/*/locations/*/glossaries/*}" + % client.transport._host, + args[1], + ) -def test_update_glossary_rest_flattened_error(transport: str = 'rest'): +def test_update_glossary_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6598,8 +7466,8 @@ def test_update_glossary_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_glossary( business_glossary.UpdateGlossaryRequest(), - glossary=business_glossary.Glossary(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -6621,7 +7489,9 @@ def test_delete_glossary_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc request = {} @@ -6641,57 +7511,62 @@ def test_delete_glossary_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_glossary_rest_required_fields(request_type=business_glossary.DeleteGlossaryRequest): +def test_delete_glossary_rest_required_fields( + request_type=business_glossary.DeleteGlossaryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
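            # transcode() normally matches a request against the method's http rule
            # and returns a dict carrying "uri", "method", "query_params" and, for
            # bodied methods, "body". Stubbing it with a fixed "v1/sample_method"
            # result keeps this test independent of the real URI template; only the
            # dict shape matters. A hedged sketch of that shape, placeholder values
            # only:
            #
            #     stub = {"uri": "v1/sample_method", "method": "delete", "query_params": pb_request}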
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6699,24 +7574,24 @@ def test_delete_glossary_rest_required_fields(request_type=business_glossary.Del response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_glossary_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) def test_delete_glossary_rest_flattened(): @@ -6726,16 +7601,18 @@ def test_delete_glossary_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -6743,7 +7620,7 @@ def test_delete_glossary_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6753,10 +7630,13 @@ def test_delete_glossary_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) -def test_delete_glossary_rest_flattened_error(transport: str = 'rest'): +def test_delete_glossary_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6767,7 +7647,7 @@ def test_delete_glossary_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_glossary( business_glossary.DeleteGlossaryRequest(), - name='name_value', + name="name_value", ) @@ -6789,7 +7669,9 @@ def test_get_glossary_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc request = {} @@ -6805,55 +7687,60 @@ def test_get_glossary_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_glossary_rest_required_fields(request_type=business_glossary.GetGlossaryRequest): +def test_get_glossary_rest_required_fields( + request_type=business_glossary.GetGlossaryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.Glossary() # Mock the http request call within the method and fake a response. 
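    # The faking pattern: patch requests.Session.request and hand back a canned
    # requests.Response whose private _content attribute carries the proto's
    # JSON encoding (relying on _content is a test-only shortcut, not public
    # API). In miniature, using objects this module already imports:
    #
    #     fake = Response()
    #     fake.status_code = 200
    #     fake._content = json_format.MessageToJson(
    #         business_glossary.Glossary.pb(return_value)
    #     ).encode("UTF-8")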
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6864,24 +7751,24 @@ def test_get_glossary_rest_required_fields(request_type=business_glossary.GetGlo return_value = business_glossary.Glossary.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_glossary_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_glossary_rest_flattened(): @@ -6891,16 +7778,18 @@ def test_get_glossary_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.Glossary() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -6910,7 +7799,7 @@ def test_get_glossary_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.Glossary.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6920,10 +7809,13 @@ def test_get_glossary_rest_flattened(): # request object values. 
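    # path_template.validate(tmpl, url) reports whether the URL matches the
    # template once each {var=pattern} segment is expanded ("*" matches one path
    # segment). A standalone example that should hold with api_core alone (the
    # host and resource ids are made up):
    #
    #     assert path_template.validate(
    #         "https://h/v1/{name=projects/*/locations/*/glossaries/*}",
    #         "https://h/v1/projects/p/locations/l/glossaries/g",
    #     )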
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) -def test_get_glossary_rest_flattened_error(transport: str = 'rest'): +def test_get_glossary_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6934,7 +7826,7 @@ def test_get_glossary_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_glossary( business_glossary.GetGlossaryRequest(), - name='name_value', + name="name_value", ) @@ -6956,7 +7848,9 @@ def test_list_glossaries_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc request = {} @@ -6972,57 +7866,69 @@ def test_list_glossaries_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_glossaries_rest_required_fields(request_type=business_glossary.ListGlossariesRequest): +def test_list_glossaries_rest_required_fields( + request_type=business_glossary.ListGlossariesRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossaries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossaries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
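    # _get_unset_required_fields() returns the required fields still absent from
    # the request. Once the path parameter ("parent") is populated, anything left
    # unset should be a plain query parameter, which the set difference below
    # verifies. The companion *_unset_required_fields test asserts the
    # intersection form of the same rule; a tiny illustration with the fields
    # used here (camelCase, as they appear after transcoding):
    #
    #     query_defaults = {"filter", "orderBy", "pageSize", "pageToken"}
    #     required = {"parent"}
    #     assert query_defaults & required == set()  # nothing mixes in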
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossariesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7033,24 +7939,34 @@ def test_list_glossaries_rest_required_fields(request_type=business_glossary.Lis return_value = business_glossary.ListGlossariesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossaries(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_glossaries_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_glossaries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_glossaries_rest_flattened(): @@ -7060,16 +7976,16 @@ def test_list_glossaries_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
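        # (The canned response comes first; the request itself is then assembled
        # from keyword arguments rather than a request object: sample_request
        # supplies values that satisfy the method's http rule so transcoding can
        # resolve a URI, and mock_args adds one truthy value per flattened field
        # before the merge.) In miniature, with the same placeholders used below:
        #
        #     args = dict(parent="parent_value")
        #     args.update({"parent": "projects/sample1/locations/sample2"})
        #     assert args["parent"] == "projects/sample1/locations/sample2"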
return_value = business_glossary.ListGlossariesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7079,7 +7995,7 @@ def test_list_glossaries_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.ListGlossariesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7089,10 +8005,13 @@ def test_list_glossaries_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) -def test_list_glossaries_rest_flattened_error(transport: str = 'rest'): +def test_list_glossaries_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7103,20 +8022,20 @@ def test_list_glossaries_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_glossaries( business_glossary.ListGlossariesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_glossaries_rest_pager(transport: str = 'rest'): +def test_list_glossaries_rest_pager(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
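        # The pager test chains four canned pages through next_page_token ("abc",
        # "def", "ghi", then "" to stop); the tuple is doubled because the pager
        # is consumed twice, once via list(pager) and once via .pages, and each
        # pass replays the full HTTP sequence. Token handling in miniature:
        #
        #     seen = []
        #     for token in ["abc", "def", "ghi", ""]:
        #         if not token:
        #             break  # an empty next_page_token marks the final page
        #         seen.append(token)
        #     assert seen == ["abc", "def", "ghi"]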
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( business_glossary.ListGlossariesResponse( @@ -7125,17 +8044,17 @@ def test_list_glossaries_rest_pager(transport: str = 'rest'): business_glossary.Glossary(), business_glossary.Glossary(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossariesResponse( glossaries=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossariesResponse( glossaries=[ business_glossary.Glossary(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossariesResponse( glossaries=[ @@ -7148,24 +8067,25 @@ def test_list_glossaries_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(business_glossary.ListGlossariesResponse.to_json(x) for x in response) + response = tuple( + business_glossary.ListGlossariesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_glossaries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.Glossary) - for i in results) + assert all(isinstance(i, business_glossary.Glossary) for i in results) pages = list(client.list_glossaries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -7183,12 +8103,19 @@ def test_create_glossary_category_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.create_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_glossary_category + ] = mock_rpc request = {} client.create_glossary_category(request) @@ -7203,7 +8130,9 @@ def test_create_glossary_category_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_glossary_category_rest_required_fields(request_type=business_glossary.CreateGlossaryCategoryRequest): +def test_create_glossary_category_rest_required_fields( + request_type=business_glossary.CreateGlossaryCategoryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} @@ -7211,58 +8140,61 @@ def test_create_glossary_category_rest_required_fields(request_type=business_glo request_init["category_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "categoryId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "categoryId" in jsonified_request assert jsonified_request["categoryId"] == request_init["category_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["categoryId"] = 'category_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["categoryId"] = "category_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_category._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("category_id", )) + assert not set(unset_fields) - set(("category_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "categoryId" in jsonified_request - assert jsonified_request["categoryId"] == 'category_id_value' + assert jsonified_request["categoryId"] == "category_id_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
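        # Unlike the path parameter ("parent"), category_id travels as a query
        # parameter, so after transcoding it must show up in the request params
        # next to the standard system parameter ("$alt", "json;enum-encoding=int"),
        # which pins JSON transport with integer enum encoding. Sketch of the
        # expected pairing (the empty string mirrors request_init above):
        #
        #     expected = [("categoryId", ""), ("$alt", "json;enum-encoding=int")]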
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -7272,7 +8204,7 @@ def test_create_glossary_category_rest_required_fields(request_type=business_glo return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7283,17 +8215,28 @@ def test_create_glossary_category_rest_required_fields(request_type=business_glo "categoryId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_glossary_category_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_glossary_category._get_unset_required_fields({}) - assert set(unset_fields) == (set(("categoryId", )) & set(("parent", "categoryId", "category", ))) + assert set(unset_fields) == ( + set(("categoryId",)) + & set( + ( + "parent", + "categoryId", + "category", + ) + ) + ) def test_create_glossary_category_rest_flattened(): @@ -7303,18 +8246,20 @@ def test_create_glossary_category_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = business_glossary.GlossaryCategory() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) mock_args.update(sample_request) @@ -7324,7 +8269,7 @@ def test_create_glossary_category_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7334,10 +8279,14 @@ def test_create_glossary_category_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" + % client.transport._host, + args[1], + ) -def test_create_glossary_category_rest_flattened_error(transport: str = 'rest'): +def test_create_glossary_category_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7348,9 +8297,9 @@ def test_create_glossary_category_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_glossary_category( business_glossary.CreateGlossaryCategoryRequest(), - parent='parent_value', - category=business_glossary.GlossaryCategory(name='name_value'), - category_id='category_id_value', + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", ) @@ -7368,12 +8317,19 @@ def test_update_glossary_category_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.update_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_glossary_category + ] = mock_rpc request = {} client.update_glossary_category(request) @@ -7388,54 +8344,59 @@ def test_update_glossary_category_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_glossary_category_rest_required_fields(request_type=business_glossary.UpdateGlossaryCategoryRequest): +def test_update_glossary_category_rest_required_fields( + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_category._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -7445,24 +8406,32 @@ def test_update_glossary_category_rest_required_fields(request_type=business_glo return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary_category(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_glossary_category_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_glossary_category._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("category", "updateMask", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "category", + "updateMask", + ) + ) + ) def test_update_glossary_category_rest_flattened(): @@ -7472,17 +8441,21 @@ def test_update_glossary_category_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory() # get arguments that satisfy an http rule for this method - sample_request = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} + sample_request = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -7492,7 +8465,7 @@ def test_update_glossary_category_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7502,10 +8475,14 @@ def test_update_glossary_category_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) -def test_update_glossary_category_rest_flattened_error(transport: str = 'rest'): +def test_update_glossary_category_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7516,8 +8493,8 @@ def test_update_glossary_category_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_glossary_category( business_glossary.UpdateGlossaryCategoryRequest(), - category=business_glossary.GlossaryCategory(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7535,12 +8512,19 @@ def test_delete_glossary_category_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.delete_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_category + ] = mock_rpc request = {} client.delete_glossary_category(request) @@ -7555,80 +8539,85 @@ def test_delete_glossary_category_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_glossary_category_rest_required_fields(request_type=business_glossary.DeleteGlossaryCategoryRequest): +def test_delete_glossary_category_rest_required_fields( + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary_category(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_glossary_category_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_glossary_category._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_glossary_category_rest_flattened(): @@ -7638,24 +8627,26 @@ def test_delete_glossary_category_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7665,10 +8656,14 @@ def test_delete_glossary_category_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) -def test_delete_glossary_category_rest_flattened_error(transport: str = 'rest'): +def test_delete_glossary_category_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7679,7 +8674,7 @@ def test_delete_glossary_category_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_glossary_category( business_glossary.DeleteGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) @@ -7697,12 +8692,19 @@ def test_get_glossary_category_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_glossary_category in client._transport._wrapped_methods + assert ( + client._transport.get_glossary_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_glossary_category] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_category + ] = mock_rpc request = {} client.get_glossary_category(request) @@ -7717,55 +8719,60 @@ def test_get_glossary_category_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_glossary_category_rest_required_fields(request_type=business_glossary.GetGlossaryCategoryRequest): +def test_get_glossary_category_rest_required_fields( + request_type=business_glossary.GetGlossaryCategoryRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_category._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] 
== "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7776,24 +8783,24 @@ def test_get_glossary_category_rest_required_fields(request_type=business_glossa return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary_category(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_glossary_category_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_glossary_category._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_glossary_category_rest_flattened(): @@ -7803,16 +8810,18 @@ def test_get_glossary_category_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = business_glossary.GlossaryCategory() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -7822,7 +8831,7 @@ def test_get_glossary_category_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7832,10 +8841,14 @@ def test_get_glossary_category_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) -def test_get_glossary_category_rest_flattened_error(transport: str = 'rest'): +def test_get_glossary_category_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7846,7 +8859,7 @@ def test_get_glossary_category_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_glossary_category( business_glossary.GetGlossaryCategoryRequest(), - name='name_value', + name="name_value", ) @@ -7864,12 +8877,19 @@ def test_list_glossary_categories_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossary_categories in client._transport._wrapped_methods + assert ( + client._transport.list_glossary_categories + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_glossary_categories] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_glossary_categories + ] = mock_rpc request = {} client.list_glossary_categories(request) @@ -7884,57 +8904,69 @@ def test_list_glossary_categories_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_glossary_categories_rest_required_fields(request_type=business_glossary.ListGlossaryCategoriesRequest): +def test_list_glossary_categories_rest_required_fields( + request_type=business_glossary.ListGlossaryCategoriesRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_categories._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_categories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_categories._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_categories._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossaryCategoriesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
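# --- Editorial aside (illustration only; not part of the generated diff) ---
# Why transcode() is stubbed here: the real path_template.transcode matches a
# request against the method's http_options and returns a dict describing the
# HTTP call, roughly {"uri": ..., "method": ..., "query_params": ...}, plus a
# "body" entry for POST/PATCH methods. These tests build requests from
# placeholder values (empty strings), which would not satisfy a real URI
# template, so the stub returns a minimal stand-in of that shape:
#
#     transcode_result = {
#         "uri": "v1/sample_method",   # never actually requested
#         "method": "get",
#         "query_params": pb_request,  # the whole request surfaces as params
#     }
# ---------------------------------------------------------------------------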
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7942,27 +8974,39 @@ def test_list_glossary_categories_rest_required_fields(request_type=business_glo response_value.status_code = 200 # Convert return value to protobuf type - return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + return_value = business_glossary.ListGlossaryCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossary_categories(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_glossary_categories_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_glossary_categories._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_glossary_categories_rest_flattened(): @@ -7972,16 +9016,18 @@ def test_list_glossary_categories_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossaryCategoriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7991,7 +9037,7 @@ def test_list_glossary_categories_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8001,10 +9047,14 @@ def test_list_glossary_categories_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" + % client.transport._host, + args[1], + ) -def test_list_glossary_categories_rest_flattened_error(transport: str = 'rest'): +def test_list_glossary_categories_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8015,20 +9065,20 @@ def test_list_glossary_categories_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_glossary_categories( business_glossary.ListGlossaryCategoriesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_glossary_categories_rest_pager(transport: str = 'rest'): +def test_list_glossary_categories_rest_pager(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( business_glossary.ListGlossaryCategoriesResponse( @@ -8037,17 +9087,17 @@ def test_list_glossary_categories_rest_pager(transport: str = 'rest'): business_glossary.GlossaryCategory(), business_glossary.GlossaryCategory(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryCategoriesResponse( categories=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ business_glossary.GlossaryCategory(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryCategoriesResponse( categories=[ @@ -8060,24 +9110,28 @@ def test_list_glossary_categories_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(business_glossary.ListGlossaryCategoriesResponse.to_json(x) for x in response) + response = tuple( + business_glossary.ListGlossaryCategoriesResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } pager = client.list_glossary_categories(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.GlossaryCategory) - for i in results) + assert all(isinstance(i, business_glossary.GlossaryCategory) for i in results) pages = list(client.list_glossary_categories(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ 
-8095,12 +9149,18 @@ def test_create_glossary_term_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.create_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_term + ] = mock_rpc request = {} client.create_glossary_term(request) @@ -8115,7 +9175,9 @@ def test_create_glossary_term_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_glossary_term_rest_required_fields(request_type=business_glossary.CreateGlossaryTermRequest): +def test_create_glossary_term_rest_required_fields( + request_type=business_glossary.CreateGlossaryTermRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} @@ -8123,58 +9185,61 @@ def test_create_glossary_term_rest_required_fields(request_type=business_glossar request_init["term_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "termId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "termId" in jsonified_request assert jsonified_request["termId"] == request_init["term_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["termId"] = 'term_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["termId"] = "term_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_term._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
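# --- Editorial aside (illustration only; not part of the generated diff) ---
# The assertion below uses a set-difference idiom: every field still reported
# as unset must belong to the method's allowed query/body parameters, i.e.
# the difference must be empty. A self-contained sketch with hypothetical
# values:
#
#     unset = {"term_id"}          # fields left unset by the bare request
#     allowed = {"term_id"}        # params the http rule is allowed to carry
#     assert not unset - allowed   # empty difference means nothing leaked
#
# The companion *_unset_required_fields tests use an intersection instead:
# set(defaultable params) & set(required fields) is exactly the set of
# required fields that may legitimately be absent from an empty request.
# ---------------------------------------------------------------------------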
- assert not set(unset_fields) - set(("term_id", )) + assert not set(unset_fields) - set(("term_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "termId" in jsonified_request - assert jsonified_request["termId"] == 'term_id_value' + assert jsonified_request["termId"] == "term_id_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8184,7 +9249,7 @@ def test_create_glossary_term_rest_required_fields(request_type=business_glossar return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8195,17 +9260,28 @@ def test_create_glossary_term_rest_required_fields(request_type=business_glossar "termId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_glossary_term_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_glossary_term._get_unset_required_fields({}) - assert set(unset_fields) == (set(("termId", )) & set(("parent", "termId", "term", ))) + assert set(unset_fields) == ( + set(("termId",)) + & set( + ( + "parent", + "termId", + "term", + ) + ) + ) def test_create_glossary_term_rest_flattened(): @@ -8215,18 +9291,20 @@ def test_create_glossary_term_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = business_glossary.GlossaryTerm() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) mock_args.update(sample_request) @@ -8236,7 +9314,7 @@ def test_create_glossary_term_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8246,10 +9324,14 @@ def test_create_glossary_term_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" + % client.transport._host, + args[1], + ) -def test_create_glossary_term_rest_flattened_error(transport: str = 'rest'): +def test_create_glossary_term_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8260,9 +9342,9 @@ def test_create_glossary_term_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_glossary_term( business_glossary.CreateGlossaryTermRequest(), - parent='parent_value', - term=business_glossary.GlossaryTerm(name='name_value'), - term_id='term_id_value', + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", ) @@ -8280,12 +9362,18 @@ def test_update_glossary_term_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.update_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_glossary_term + ] = mock_rpc request = {} client.update_glossary_term(request) @@ -8300,54 +9388,59 @@ def test_update_glossary_term_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_glossary_term_rest_required_fields(request_type=business_glossary.UpdateGlossaryTermRequest): +def test_update_glossary_term_rest_required_fields( + request_type=business_glossary.UpdateGlossaryTermRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_term._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8357,24 +9450,32 @@ def test_update_glossary_term_rest_required_fields(request_type=business_glossar return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary_term(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_glossary_term_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_glossary_term._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("term", "updateMask", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "term", + "updateMask", + ) + ) + ) def test_update_glossary_term_rest_flattened(): @@ -8384,17 +9485,21 @@ def test_update_glossary_term_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm() # get arguments that satisfy an http rule for this method - sample_request = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}} + sample_request = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -8404,7 +9509,7 @@ def test_update_glossary_term_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8414,10 +9519,14 @@ def test_update_glossary_term_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) -def test_update_glossary_term_rest_flattened_error(transport: str = 'rest'): +def test_update_glossary_term_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8428,8 +9537,8 @@ def test_update_glossary_term_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_glossary_term( business_glossary.UpdateGlossaryTermRequest(), - term=business_glossary.GlossaryTerm(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -8447,12 +9556,18 @@ def test_delete_glossary_term_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_glossary_term in client._transport._wrapped_methods + assert ( + client._transport.delete_glossary_term in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_term + ] = mock_rpc request = {} client.delete_glossary_term(request) @@ -8467,80 +9582,85 @@ def test_delete_glossary_term_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_glossary_term_rest_required_fields(request_type=business_glossary.DeleteGlossaryTermRequest): +def test_delete_glossary_term_rest_required_fields( + request_type=business_glossary.DeleteGlossaryTermRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required 
fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary_term(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_glossary_term_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_glossary_term._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_glossary_term_rest_flattened(): @@ -8550,24 +9670,26 @@ def test_delete_glossary_term_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8577,10 +9699,14 @@ def test_delete_glossary_term_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) -def test_delete_glossary_term_rest_flattened_error(transport: str = 'rest'): +def test_delete_glossary_term_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8591,7 +9717,7 @@ def test_delete_glossary_term_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_glossary_term( business_glossary.DeleteGlossaryTermRequest(), - name='name_value', + name="name_value", ) @@ -8613,8 +9739,12 @@ def test_get_glossary_term_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_glossary_term] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_glossary_term + ] = mock_rpc request = {} client.get_glossary_term(request) @@ -8629,55 +9759,60 @@ def test_get_glossary_term_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_glossary_term_rest_required_fields(request_type=business_glossary.GetGlossaryTermRequest): +def test_get_glossary_term_rest_required_fields( + request_type=business_glossary.GetGlossaryTermRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_glossary_term._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_term._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -8688,24 +9823,24 @@ def test_get_glossary_term_rest_required_fields(request_type=business_glossary.G return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary_term(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_glossary_term_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_glossary_term._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_glossary_term_rest_flattened(): @@ -8715,16 +9850,18 @@ def test_get_glossary_term_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -8734,7 +9871,7 @@ def test_get_glossary_term_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8744,10 +9881,14 @@ def test_get_glossary_term_rest_flattened(): # request object values. 
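# --- Editorial aside (illustration only; not part of the generated diff) ---
# The assertions below unpack the single mocked Session.request call and check
# its URL (args[1]) against the method's URI template via
# path_template.validate(template, path), which returns True when the concrete
# path matches the pattern. Hypothetical values for illustration:
#
#     path_template.validate(
#         "v1/{name=projects/*/locations/*/glossaries/*/terms/*}",
#         "v1/projects/p1/locations/l1/glossaries/g1/terms/t1",
#     )  # -> True, so the flattened `name` argument was expanded correctly
# ---------------------------------------------------------------------------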
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) -def test_get_glossary_term_rest_flattened_error(transport: str = 'rest'): +def test_get_glossary_term_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8758,7 +9899,7 @@ def test_get_glossary_term_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_glossary_term( business_glossary.GetGlossaryTermRequest(), - name='name_value', + name="name_value", ) @@ -8776,12 +9917,18 @@ def test_list_glossary_terms_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_glossary_terms in client._transport._wrapped_methods + assert ( + client._transport.list_glossary_terms in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_glossary_terms] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_glossary_terms + ] = mock_rpc request = {} client.list_glossary_terms(request) @@ -8796,57 +9943,69 @@ def test_list_glossary_terms_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_glossary_terms_rest_required_fields(request_type=business_glossary.ListGlossaryTermsRequest): +def test_list_glossary_terms_rest_required_fields( + request_type=business_glossary.ListGlossaryTermsRequest, +): transport_class = transports.BusinessGlossaryServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_terms._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_terms._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_glossary_terms._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_terms._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossaryTermsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -8857,24 +10016,34 @@ def test_list_glossary_terms_rest_required_fields(request_type=business_glossary return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossary_terms(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_glossary_terms_rest_unset_required_fields(): - transport = transports.BusinessGlossaryServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_glossary_terms._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_glossary_terms_rest_flattened(): @@ -8884,16 +10053,18 @@ def test_list_glossary_terms_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = business_glossary.ListGlossaryTermsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -8903,7 +10074,7 @@ def test_list_glossary_terms_rest_flattened(): # Convert return value to protobuf type return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8913,10 +10084,14 @@ def test_list_glossary_terms_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" + % client.transport._host, + args[1], + ) -def test_list_glossary_terms_rest_flattened_error(transport: str = 'rest'): +def test_list_glossary_terms_rest_flattened_error(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8927,20 +10102,20 @@ def test_list_glossary_terms_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_glossary_terms( business_glossary.ListGlossaryTermsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_glossary_terms_rest_pager(transport: str = 'rest'): +def test_list_glossary_terms_rest_pager(transport: str = "rest"): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
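# --- Editorial aside (illustration only; not part of the generated diff) ---
# How the pager test fakes pagination: each ListGlossaryTermsResponse page is
# serialized to JSON, loaded into its own requests.Response, and the whole
# sequence is installed with `req.side_effect = return_values`, so each
# successive Session.request call consumes the next page. The pager keeps
# requesting while next_page_token is non-empty ("abc" -> "def" -> "ghi" ->
# ""), which is why four pages yield six items in total below.
# ---------------------------------------------------------------------------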
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( business_glossary.ListGlossaryTermsResponse( @@ -8949,17 +10124,17 @@ def test_list_glossary_terms_rest_pager(transport: str = 'rest'): business_glossary.GlossaryTerm(), business_glossary.GlossaryTerm(), ], - next_page_token='abc', + next_page_token="abc", ), business_glossary.ListGlossaryTermsResponse( terms=[], - next_page_token='def', + next_page_token="def", ), business_glossary.ListGlossaryTermsResponse( terms=[ business_glossary.GlossaryTerm(), ], - next_page_token='ghi', + next_page_token="ghi", ), business_glossary.ListGlossaryTermsResponse( terms=[ @@ -8972,24 +10147,27 @@ def test_list_glossary_terms_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(business_glossary.ListGlossaryTermsResponse.to_json(x) for x in response) + response = tuple( + business_glossary.ListGlossaryTermsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } pager = client.list_glossary_terms(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, business_glossary.GlossaryTerm) - for i in results) + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in results) pages = list(client.list_glossary_terms(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9031,8 +10209,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = BusinessGlossaryServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -9054,6 +10231,7 @@ def test_transport_instance(): client = BusinessGlossaryServiceClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.BusinessGlossaryServiceGrpcTransport( @@ -9068,18 +10246,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.BusinessGlossaryServiceGrpcTransport, - transports.BusinessGlossaryServiceGrpcAsyncIOTransport, - transports.BusinessGlossaryServiceRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + transports.BusinessGlossaryServiceRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = BusinessGlossaryServiceClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -9089,8 +10272,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -9104,10 +10286,8 @@ def test_create_glossary_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_glossary(request=None) # Establish that the underlying stub method was called. @@ -9127,10 +10307,8 @@ def test_update_glossary_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_glossary(request=None) # Establish that the underlying stub method was called. @@ -9150,10 +10328,8 @@ def test_delete_glossary_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_glossary(request=None) # Establish that the underlying stub method was called. @@ -9173,9 +10349,7 @@ def test_get_glossary_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: call.return_value = business_glossary.Glossary() client.get_glossary(request=None) @@ -9196,9 +10370,7 @@ def test_list_glossaries_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: call.return_value = business_glossary.ListGlossariesResponse() client.list_glossaries(request=None) @@ -9220,8 +10392,8 @@ def test_create_glossary_category_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.create_glossary_category(request=None) @@ -9243,8 +10415,8 @@ def test_update_glossary_category_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.update_glossary_category(request=None) @@ -9266,8 +10438,8 @@ def test_delete_glossary_category_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: call.return_value = None client.delete_glossary_category(request=None) @@ -9289,8 +10461,8 @@ def test_get_glossary_category_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: call.return_value = business_glossary.GlossaryCategory() client.get_glossary_category(request=None) @@ -9312,8 +10484,8 @@ def test_list_glossary_categories_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: call.return_value = business_glossary.ListGlossaryCategoriesResponse() client.list_glossary_categories(request=None) @@ -9335,8 +10507,8 @@ def test_create_glossary_term_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.create_glossary_term(request=None) @@ -9358,8 +10530,8 @@ def test_update_glossary_term_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.update_glossary_term(request=None) @@ -9381,8 +10553,8 @@ def test_delete_glossary_term_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: call.return_value = None client.delete_glossary_term(request=None) @@ -9404,8 +10576,8 @@ def test_get_glossary_term_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: call.return_value = business_glossary.GlossaryTerm() client.get_glossary_term(request=None) @@ -9427,8 +10599,8 @@ def test_list_glossary_terms_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: call.return_value = business_glossary.ListGlossaryTermsResponse() client.list_glossary_terms(request=None) @@ -9449,8 +10621,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -9465,12 +10636,10 @@ async def test_create_glossary_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_glossary(request=None) @@ -9492,12 +10661,10 @@ async def test_update_glossary_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_glossary(request=None) @@ -9519,12 +10686,10 @@ async def test_delete_glossary_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_glossary(request=None) @@ -9546,19 +10711,19 @@ async def test_get_glossary_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.Glossary( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - term_count=1088, - category_count=1510, - etag='etag_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + ) await client.get_glossary(request=None) # Establish that the underlying stub method was called. @@ -9579,14 +10744,14 @@ async def test_list_glossaries_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
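# (editor's aside: the grpc_asyncio variants in this stretch must hand the
# client an awaitable, which is what grpc_helpers_async.FakeUnaryUnaryCall
# supplies; a minimal stand-in showing the shape, with _FakeCall as a
# hypothetical name:)
import asyncio

class _FakeCall:
    def __init__(self, response=None):
        self._response = response
    def __await__(self):
        if False:
            yield  # generator syntax makes __await__ return an iterator
        return self._response

async def _demo():
    assert await _FakeCall("pong") == "pong"

asyncio.run(_demo())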
- with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossariesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) await client.list_glossaries(request=None) # Establish that the underlying stub method was called. @@ -9608,16 +10773,18 @@ async def test_create_glossary_category_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.create_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -9639,16 +10806,18 @@ async def test_update_glossary_category_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.update_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -9670,8 +10839,8 @@ async def test_delete_glossary_category_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_glossary_category(request=None) @@ -9695,16 +10864,18 @@ async def test_get_glossary_category_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.get_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -9726,13 +10897,15 @@ async def test_list_glossary_categories_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryCategoriesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) await client.list_glossary_categories(request=None) # Establish that the underlying stub method was called. @@ -9754,16 +10927,18 @@ async def test_create_glossary_term_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.create_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -9785,16 +10960,18 @@ async def test_update_glossary_term_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.update_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -9816,8 +10993,8 @@ async def test_delete_glossary_term_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_glossary_term(request=None) @@ -9841,16 +11018,18 @@ async def test_get_glossary_term_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) await client.get_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -9872,13 +11051,15 @@ async def test_list_glossary_terms_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(business_glossary.ListGlossaryTermsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) await client.list_glossary_terms(request=None) # Establish that the underlying stub method was called. @@ -9896,20 +11077,23 @@ def test_transport_kind_rest(): assert transport.kind == "rest" -def test_create_glossary_rest_bad_request(request_type=business_glossary.CreateGlossaryRequest): +def test_create_glossary_rest_bad_request( + request_type=business_glossary.CreateGlossaryRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
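# (editor's aside: the *_rest_bad_request tests stub requests.Session.request
# to return a 400 so the transport's error mapping raises without any network
# traffic; a self-contained sketch of the idea, where _BadRequest and
# _call_api are hypothetical stand-ins for core_exceptions.BadRequest and the
# transport's request path:)
from unittest import mock
import requests

class _BadRequest(Exception):
    pass

def _call_api(session):
    response = session.request("POST", "https://example.invalid/v1/glossaries")
    if response.status_code == 400:
        raise _BadRequest(response.json())
    return response

with mock.patch.object(requests.Session, "request") as _req:
    _req.return_value = mock.Mock(
        status_code=400, json=mock.Mock(return_value={}), request=mock.Mock()
    )
    try:
        _call_api(requests.Session())
    except _BadRequest:
        pass
    else:
        raise AssertionError("expected _BadRequest")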
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -9918,19 +11102,32 @@ def test_create_glossary_rest_bad_request(request_type=business_glossary.CreateG client.create_glossary(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryRequest, + dict, + ], +) def test_create_glossary_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["glossary"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'term_count': 1088, 'category_count': 1510, 'etag': 'etag_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["glossary"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "term_count": 1088, + "category_count": 1510, + "etag": "etag_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -9950,7 +11147,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -9964,7 +11161,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["glossary"].items(): # pragma: NO COVER + for field, value in request_init["glossary"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -9979,12 +11176,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -9997,15 +11198,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
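# (editor's aside: the success-path mocks below serialize the expected proto
# with json_format.MessageToJson and hand the encoded bytes back as the HTTP
# body, mirroring the wire format the REST transport parses; a tiny
# round-trip demonstration using the same operations_pb2 message:)
from google.longrunning import operations_pb2
from google.protobuf import json_format

_msg = operations_pb2.Operation(name="operations/spam")
_body = json_format.MessageToJson(_msg).encode("UTF-8")
_parsed = json_format.Parse(_body.decode("UTF-8"), operations_pb2.Operation())
assert _parsed.name == "operations/spam"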
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_glossary(request) @@ -10018,20 +11219,32 @@ def get_message_fields(field): def test_create_glossary_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.CreateGlossaryRequest.pb(business_glossary.CreateGlossaryRequest()) + pb_message = business_glossary.CreateGlossaryRequest.pb( + business_glossary.CreateGlossaryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10046,7 +11259,7 @@ def test_create_glossary_rest_interceptors(null_interceptor): req.return_value.content = return_value request = business_glossary.CreateGlossaryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10054,27 +11267,38 @@ def test_create_glossary_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_glossary_rest_bad_request(request_type=business_glossary.UpdateGlossaryRequest): +def test_update_glossary_rest_bad_request( + request_type=business_glossary.UpdateGlossaryRequest, +): client = BusinessGlossaryServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10083,19 +11307,34 @@ def test_update_glossary_rest_bad_request(request_type=business_glossary.UpdateG client.update_glossary(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryRequest, + dict, + ], +) def test_update_glossary_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'glossary': {'name': 'projects/sample1/locations/sample2/glossaries/sample3'}} - request_init["glossary"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'term_count': 1088, 'category_count': 1510, 'etag': 'etag_value'} + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } + request_init["glossary"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "term_count": 1088, + "category_count": 1510, + "etag": "etag_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -10115,7 +11354,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10129,7 +11368,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["glossary"].items(): # pragma: NO COVER + for field, value in request_init["glossary"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10144,12 +11383,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10162,15 +11405,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
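# (editor's aside: the pruning loop just above drops sample-request keys that
# the runtime proto no longer defines, so these tests survive version skew in
# the generated dependency; the same idea with plain dicts standing in for
# the message descriptors:)
_runtime_fields = {"name", "uid", "display_name"}
_sample = {"name": "n", "uid": "u", "legacy_field": "dropped at runtime"}

for _key in [k for k in _sample if k not in _runtime_fields]:
    del _sample[_key]

assert "legacy_field" not in _sample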
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary(request) @@ -10183,20 +11426,32 @@ def get_message_fields(field): def test_update_glossary_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.UpdateGlossaryRequest.pb(business_glossary.UpdateGlossaryRequest()) + pb_message = business_glossary.UpdateGlossaryRequest.pb( + business_glossary.UpdateGlossaryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10211,7 +11466,7 @@ def test_update_glossary_rest_interceptors(null_interceptor): req.return_value.content = return_value request = business_glossary.UpdateGlossaryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10219,27 +11474,36 @@ def test_update_glossary_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_glossary_rest_bad_request(request_type=business_glossary.DeleteGlossaryRequest): +def test_delete_glossary_rest_bad_request( + request_type=business_glossary.DeleteGlossaryRequest, +): client = BusinessGlossaryServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10248,30 +11512,32 @@ def test_delete_glossary_rest_bad_request(request_type=business_glossary.DeleteG client.delete_glossary(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryRequest, + dict, + ], +) def test_delete_glossary_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary(request) @@ -10284,20 +11550,32 @@ def test_delete_glossary_rest_call_success(request_type): def test_delete_glossary_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_delete_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.DeleteGlossaryRequest.pb(business_glossary.DeleteGlossaryRequest()) + pb_message = business_glossary.DeleteGlossaryRequest.pb( + business_glossary.DeleteGlossaryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10312,7 +11590,7 @@ def test_delete_glossary_rest_interceptors(null_interceptor): req.return_value.content = return_value request = business_glossary.DeleteGlossaryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10320,27 +11598,36 @@ def test_delete_glossary_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_glossary_rest_bad_request(request_type=business_glossary.GetGlossaryRequest): +def test_get_glossary_rest_bad_request( + request_type=business_glossary.GetGlossaryRequest, +): client = 
BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10349,31 +11636,33 @@ def test_get_glossary_rest_bad_request(request_type=business_glossary.GetGlossar client.get_glossary(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryRequest, + dict, + ], +) def test_get_glossary_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.Glossary( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - term_count=1088, - category_count=1510, - etag='etag_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", ) # Wrap the value into a proper Response obj @@ -10383,39 +11672,50 @@ def test_get_glossary_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.Glossary.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary(request) # Establish that the response is the type that we expect. 
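# (editor's aside: the *_rest_interceptors tests in this stretch pin down the
# hook ordering pre -> transport -> post/post_with_metadata; a toy version of
# that contract, with every name here illustrative rather than the generated
# interceptor API:)
class _Interceptor:
    def pre(self, request, metadata):
        return request, metadata + [("cephalopod", "squid")]
    def post(self, response):
        return response
    def post_with_metadata(self, response, metadata):
        return response, metadata

def _invoke(interceptor, request, metadata):
    request, metadata = interceptor.pre(request, metadata)
    response = {"echo": request}  # stands in for the HTTP round trip
    response = interceptor.post(response)
    return interceptor.post_with_metadata(response, metadata)

_resp, _md = _invoke(_Interceptor(), {"parent": "p"}, [("key", "val")])
assert ("cephalopod", "squid") in _md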
assert isinstance(response, business_glossary.Glossary) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" assert response.term_count == 1088 assert response.category_count == 1510 - assert response.etag == 'etag_value' + assert response.etag == "etag_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_glossary_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.GetGlossaryRequest.pb(business_glossary.GetGlossaryRequest()) + pb_message = business_glossary.GetGlossaryRequest.pb( + business_glossary.GetGlossaryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10430,7 +11730,7 @@ def test_get_glossary_rest_interceptors(null_interceptor): req.return_value.content = return_value request = business_glossary.GetGlossaryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10438,27 +11738,36 @@ def test_get_glossary_rest_interceptors(null_interceptor): post.return_value = business_glossary.Glossary() post_with_metadata.return_value = business_glossary.Glossary(), metadata - client.get_glossary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_glossaries_rest_bad_request(request_type=business_glossary.ListGlossariesRequest): +def test_list_glossaries_rest_bad_request( + request_type=business_glossary.ListGlossariesRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - 
request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10467,26 +11776,28 @@ def test_list_glossaries_rest_bad_request(request_type=business_glossary.ListGlo client.list_glossaries(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossariesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossariesRequest, + dict, + ], +) def test_list_glossaries_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossariesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -10496,34 +11807,45 @@ def test_list_glossaries_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.ListGlossariesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossaries(request) # Establish that the response is the type that we expect. 
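# (editor's aside: the list call below is asserted to return a Pager rather
# than the raw ListGlossariesResponse because the client wraps paging behind
# an iterator that follows next_page_token; a toy two-page illustration, with
# _fetch as a hypothetical page source:)
def _fetch(page_token=""):
    _pages = {"": (["a", "b"], "t1"), "t1": (["c"], "")}
    return _pages[page_token]

def _iterate():
    token = ""
    while True:
        items, token = _fetch(token)
        yield from items
        if not token:
            return

assert list(_iterate()) == ["a", "b", "c"]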
assert isinstance(response, pagers.ListGlossariesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_glossaries_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossaries") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossaries_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossaries" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.ListGlossariesRequest.pb(business_glossary.ListGlossariesRequest()) + pb_message = business_glossary.ListGlossariesRequest.pb( + business_glossary.ListGlossariesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10534,39 +11856,53 @@ def test_list_glossaries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.ListGlossariesResponse.to_json(business_glossary.ListGlossariesResponse()) + return_value = business_glossary.ListGlossariesResponse.to_json( + business_glossary.ListGlossariesResponse() + ) req.return_value.content = return_value request = business_glossary.ListGlossariesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = business_glossary.ListGlossariesResponse() - post_with_metadata.return_value = business_glossary.ListGlossariesResponse(), metadata + post_with_metadata.return_value = ( + business_glossary.ListGlossariesResponse(), + metadata, + ) - client.list_glossaries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_glossaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_glossary_category_rest_bad_request(request_type=business_glossary.CreateGlossaryCategoryRequest): +def test_create_glossary_category_rest_bad_request( + 
request_type=business_glossary.CreateGlossaryCategoryRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10575,19 +11911,30 @@ def test_create_glossary_category_rest_bad_request(request_type=business_glossar client.create_glossary_category(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryCategoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryCategoryRequest, + dict, + ], +) def test_create_glossary_category_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} - request_init["category"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init["category"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -10607,7 +11954,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10621,7 +11968,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["category"].items(): # pragma: NO COVER + for field, value in request_init["category"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10636,12 +11983,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10654,14 +12005,14 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -10671,37 +12022,50 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_glossary_category(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_glossary_category_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_category") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_category_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_category") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_category", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_create_glossary_category", + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.CreateGlossaryCategoryRequest.pb(business_glossary.CreateGlossaryCategoryRequest()) + pb_message = business_glossary.CreateGlossaryCategoryRequest.pb( + business_glossary.CreateGlossaryCategoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10712,11 +12076,13 @@ def test_create_glossary_category_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) req.return_value.content = return_value request = business_glossary.CreateGlossaryCategoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10724,27 +12090,40 @@ def test_create_glossary_category_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryCategory() post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata - client.create_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() 
post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_glossary_category_rest_bad_request(request_type=business_glossary.UpdateGlossaryCategoryRequest): +def test_update_glossary_category_rest_bad_request( + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} + request_init = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10753,19 +12132,34 @@ def test_update_glossary_category_rest_bad_request(request_type=business_glossar client.update_glossary_category(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryCategoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryCategoryRequest, + dict, + ], +) def test_update_glossary_category_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'category': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'}} - request_init["category"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + request_init = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } + request_init["category"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -10785,7 +12179,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10799,7 +12193,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["category"].items(): # pragma: NO COVER + for field, value in request_init["category"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10814,12 +12208,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10832,14 +12230,14 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -10849,37 +12247,50 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary_category(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_glossary_category_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_category") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_category_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_category") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_category", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_update_glossary_category", + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.UpdateGlossaryCategoryRequest.pb(business_glossary.UpdateGlossaryCategoryRequest()) + pb_message = business_glossary.UpdateGlossaryCategoryRequest.pb( + business_glossary.UpdateGlossaryCategoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10890,11 +12301,13 @@ def test_update_glossary_category_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) req.return_value.content = return_value request = business_glossary.UpdateGlossaryCategoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10902,27 +12315,38 @@ def test_update_glossary_category_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryCategory() post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata - client.update_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() 
post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_glossary_category_rest_bad_request(request_type=business_glossary.DeleteGlossaryCategoryRequest): +def test_delete_glossary_category_rest_bad_request( + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -10931,30 +12355,34 @@ def test_delete_glossary_category_rest_bad_request(request_type=business_glossar client.delete_glossary_category(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryCategoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryCategoryRequest, + dict, + ], +) def test_delete_glossary_category_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary_category(request) @@ -10967,15 +12395,24 @@ def test_delete_glossary_category_rest_call_success(request_type): def test_delete_glossary_category_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_category") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_delete_glossary_category", + ) as pre: pre.assert_not_called() - pb_message = business_glossary.DeleteGlossaryCategoryRequest.pb(business_glossary.DeleteGlossaryCategoryRequest()) + pb_message = business_glossary.DeleteGlossaryCategoryRequest.pb( + business_glossary.DeleteGlossaryCategoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10988,31 +12425,42 @@ def test_delete_glossary_category_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = business_glossary.DeleteGlossaryCategoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_get_glossary_category_rest_bad_request(request_type=business_glossary.GetGlossaryCategoryRequest): +def test_get_glossary_category_rest_bad_request( + request_type=business_glossary.GetGlossaryCategoryRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11021,29 +12469,33 @@ def test_get_glossary_category_rest_bad_request(request_type=business_glossary.G client.get_glossary_category(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryCategoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryCategoryRequest, + dict, + ], +) def test_get_glossary_category_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/categories/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryCategory( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -11053,37 +12505,48 @@ def test_get_glossary_category_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.GlossaryCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary_category(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryCategory) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_glossary_category_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_category") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_category" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.GetGlossaryCategoryRequest.pb(business_glossary.GetGlossaryCategoryRequest()) + pb_message = business_glossary.GetGlossaryCategoryRequest.pb( + business_glossary.GetGlossaryCategoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11094,11 +12557,13 @@ def test_get_glossary_category_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryCategory.to_json(business_glossary.GlossaryCategory()) + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) req.return_value.content = return_value request = business_glossary.GetGlossaryCategoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -11106,27 +12571,36 @@ def test_get_glossary_category_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryCategory() post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata - client.get_glossary_category(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() 
post_with_metadata.assert_called_once() -def test_list_glossary_categories_rest_bad_request(request_type=business_glossary.ListGlossaryCategoriesRequest): +def test_list_glossary_categories_rest_bad_request( + request_type=business_glossary.ListGlossaryCategoriesRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11135,26 +12609,28 @@ def test_list_glossary_categories_rest_bad_request(request_type=business_glossar client.list_glossary_categories(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossaryCategoriesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryCategoriesRequest, + dict, + ], +) def test_list_glossary_categories_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossaryCategoriesResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -11164,34 +12640,47 @@ def test_list_glossary_categories_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossary_categories(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossaryCategoriesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_glossary_categories_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_categories") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_categories_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_categories") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_categories", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_categories_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_list_glossary_categories", + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.ListGlossaryCategoriesRequest.pb(business_glossary.ListGlossaryCategoriesRequest()) + pb_message = business_glossary.ListGlossaryCategoriesRequest.pb( + business_glossary.ListGlossaryCategoriesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11202,39 +12691,53 @@ def test_list_glossary_categories_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.ListGlossaryCategoriesResponse.to_json(business_glossary.ListGlossaryCategoriesResponse()) + return_value = business_glossary.ListGlossaryCategoriesResponse.to_json( + business_glossary.ListGlossaryCategoriesResponse() + ) req.return_value.content = return_value request = business_glossary.ListGlossaryCategoriesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = business_glossary.ListGlossaryCategoriesResponse() - post_with_metadata.return_value = business_glossary.ListGlossaryCategoriesResponse(), metadata + post_with_metadata.return_value = ( + business_glossary.ListGlossaryCategoriesResponse(), + metadata, + ) - client.list_glossary_categories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_glossary_categories( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() 
post_with_metadata.assert_called_once() -def test_create_glossary_term_rest_bad_request(request_type=business_glossary.CreateGlossaryTermRequest): +def test_create_glossary_term_rest_bad_request( + request_type=business_glossary.CreateGlossaryTermRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11243,19 +12746,30 @@ def test_create_glossary_term_rest_bad_request(request_type=business_glossary.Cr client.create_glossary_term(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.CreateGlossaryTermRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryTermRequest, + dict, + ], +) def test_create_glossary_term_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} - request_init["term"] = {'name': 'name_value', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init["term"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -11275,7 +12789,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -11289,7 +12803,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["term"].items(): # pragma: NO COVER + for field, value in request_init["term"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -11304,12 +12818,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -11322,14 +12840,14 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -11339,37 +12857,48 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_glossary_term(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_glossary_term_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_term") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_term" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.CreateGlossaryTermRequest.pb(business_glossary.CreateGlossaryTermRequest()) + pb_message = business_glossary.CreateGlossaryTermRequest.pb( + business_glossary.CreateGlossaryTermRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11380,11 +12909,13 @@ def test_create_glossary_term_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm()) + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) req.return_value.content = return_value request = business_glossary.CreateGlossaryTermRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -11392,27 +12923,40 @@ def test_create_glossary_term_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryTerm() post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata - client.create_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_update_glossary_term_rest_bad_request(request_type=business_glossary.UpdateGlossaryTermRequest): +def test_update_glossary_term_rest_bad_request( + request_type=business_glossary.UpdateGlossaryTermRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}} + request_init = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11421,19 +12965,34 @@ def test_update_glossary_term_rest_bad_request(request_type=business_glossary.Up client.update_glossary_term(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.UpdateGlossaryTermRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryTermRequest, + dict, + ], +) def test_update_glossary_term_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'term': {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'}} - request_init["term"] = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4', 'uid': 'uid_value', 'display_name': 'display_name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'parent': 'parent_value'} + request_init = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } + request_init["term"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -11453,7 +13012,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -11467,7 +13026,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["term"].items(): # pragma: NO COVER + for field, value in request_init["term"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -11482,12 +13041,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -11500,14 +13063,14 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -11517,37 +13080,48 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_glossary_term(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_glossary_term_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_term") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_term" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.UpdateGlossaryTermRequest.pb(business_glossary.UpdateGlossaryTermRequest()) + pb_message = business_glossary.UpdateGlossaryTermRequest.pb( + business_glossary.UpdateGlossaryTermRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11558,11 +13132,13 @@ def test_update_glossary_term_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm()) + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) req.return_value.content = return_value request = business_glossary.UpdateGlossaryTermRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -11570,27 +13146,38 @@ def test_update_glossary_term_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryTerm() post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata - client.update_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_delete_glossary_term_rest_bad_request(request_type=business_glossary.DeleteGlossaryTermRequest): +def test_delete_glossary_term_rest_bad_request( + request_type=business_glossary.DeleteGlossaryTermRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11599,30 +13186,34 @@ def test_delete_glossary_term_rest_bad_request(request_type=business_glossary.De client.delete_glossary_term(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.DeleteGlossaryTermRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryTermRequest, + dict, + ], +) def test_delete_glossary_term_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_glossary_term(request) @@ -11635,15 +13226,23 @@ def test_delete_glossary_term_rest_call_success(request_type): def test_delete_glossary_term_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_term") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_term" + ) as pre: pre.assert_not_called() - pb_message = business_glossary.DeleteGlossaryTermRequest.pb(business_glossary.DeleteGlossaryTermRequest()) + pb_message = business_glossary.DeleteGlossaryTermRequest.pb( + business_glossary.DeleteGlossaryTermRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11656,31 +13255,42 @@ def test_delete_glossary_term_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = business_glossary.DeleteGlossaryTermRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_get_glossary_term_rest_bad_request(request_type=business_glossary.GetGlossaryTermRequest): +def test_get_glossary_term_rest_bad_request( + request_type=business_glossary.GetGlossaryTermRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11689,29 +13299,33 @@ def test_get_glossary_term_rest_bad_request(request_type=business_glossary.GetGl client.get_glossary_term(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.GetGlossaryTermRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryTermRequest, + dict, + ], +) def test_get_glossary_term_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/glossaries/sample3/terms/sample4'} + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.GlossaryTerm( - name='name_value', - uid='uid_value', - display_name='display_name_value', - description='description_value', - parent='parent_value', + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", ) # Wrap the value into a proper Response obj @@ -11721,37 +13335,48 @@ def test_get_glossary_term_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.GlossaryTerm.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_glossary_term(request) # Establish that the response is the type that we expect. 
assert isinstance(response, business_glossary.GlossaryTerm) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent == 'parent_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_glossary_term_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_term") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_term" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.GetGlossaryTermRequest.pb(business_glossary.GetGlossaryTermRequest()) + pb_message = business_glossary.GetGlossaryTermRequest.pb( + business_glossary.GetGlossaryTermRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11762,11 +13387,13 @@ def test_get_glossary_term_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.GlossaryTerm.to_json(business_glossary.GlossaryTerm()) + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) req.return_value.content = return_value request = business_glossary.GetGlossaryTermRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -11774,27 +13401,36 @@ def test_get_glossary_term_rest_interceptors(null_interceptor): post.return_value = business_glossary.GlossaryTerm() post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata - client.get_glossary_term(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_list_glossary_terms_rest_bad_request(request_type=business_glossary.ListGlossaryTermsRequest): +def test_list_glossary_terms_rest_bad_request( + request_type=business_glossary.ListGlossaryTermsRequest, +): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -11803,26 +13439,28 @@ def test_list_glossary_terms_rest_bad_request(request_type=business_glossary.Lis client.list_glossary_terms(request) -@pytest.mark.parametrize("request_type", [ - business_glossary.ListGlossaryTermsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryTermsRequest, + dict, + ], +) def test_list_glossary_terms_rest_call_success(request_type): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/glossaries/sample3'} + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = business_glossary.ListGlossaryTermsResponse( - next_page_token='next_page_token_value', - unreachable_locations=['unreachable_locations_value'], + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -11832,34 +13470,45 @@ def test_list_glossary_terms_rest_call_success(request_type): # Convert return value to protobuf type return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_glossary_terms(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGlossaryTermsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_locations == ['unreachable_locations_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_glossary_terms_rest_interceptors(null_interceptor): transport = transports.BusinessGlossaryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BusinessGlossaryServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) client = BusinessGlossaryServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms") as post, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_terms") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_terms_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_terms" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = business_glossary.ListGlossaryTermsRequest.pb(business_glossary.ListGlossaryTermsRequest()) + pb_message = business_glossary.ListGlossaryTermsRequest.pb( + business_glossary.ListGlossaryTermsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11870,19 +13519,30 @@ def test_list_glossary_terms_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = business_glossary.ListGlossaryTermsResponse.to_json(business_glossary.ListGlossaryTermsResponse()) + return_value = business_glossary.ListGlossaryTermsResponse.to_json( + business_glossary.ListGlossaryTermsResponse() + ) req.return_value.content = return_value request = business_glossary.ListGlossaryTermsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = business_glossary.ListGlossaryTermsResponse() - post_with_metadata.return_value = business_glossary.ListGlossaryTermsResponse(), metadata + post_with_metadata.return_value = ( + business_glossary.ListGlossaryTermsResponse(), + metadata, + ) - client.list_glossary_terms(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_glossary_terms( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -11895,13 +13555,17 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - 
request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -11910,20 +13574,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.Location() @@ -11931,7 +13598,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11942,19 +13609,23 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
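# --- Sketch (illustrative; Struct is used only because it is a generally
# available message type): how json_format.ParseDict populates a protobuf
# message from a plain dict, as in the request-initialization lines above.
from google.protobuf import json_format, struct_pb2

sketch_msg = struct_pb2.Struct()
json_format.ParseDict({"name": "projects/sample1/locations/sample2"}, sketch_msg)
assert sketch_msg["name"] == "projects/sample1/locations/sample2"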
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -11963,20 +13634,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -11984,7 +13658,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11995,19 +13669,25 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
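# --- Sketch (hypothetical helper, plain `requests` + `unittest.mock`): the
# BadRequest pattern repeated through these hunks — fake a 400 on the
# session and assert that it surfaces as an exception.
from unittest import mock

import requests


def sketch_get(session, url):
    response = session.request("GET", url)
    if response.status_code == 400:
        raise ValueError("bad request")
    return response


with mock.patch.object(requests.Session, "request") as req:
    fake = mock.Mock()
    fake.status_code = 400
    fake.json = mock.Mock(return_value={})
    req.return_value = fake
    try:
        sketch_get(requests.Session(), "https://dataplex.googleapis.com/v1/x")
    except ValueError:
        pass  # the mocked 400 surfaced as an error, as asserted above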
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -12016,28 +13696,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12048,19 +13731,25 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -12069,28 +13758,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12101,19 +13793,25 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
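# --- Sketch: the response-fabrication step shared by the success-path hunks
# here — serialize a proto with MessageToJson and hand the bytes back as the
# mocked HTTP body (Struct again stands in for the generated response types).
from google.protobuf import json_format, struct_pb2

sketch_response = struct_pb2.Struct()
sketch_response["greeting"] = "hello"
sketch_body = json_format.MessageToJson(sketch_response).encode("UTF-8")
assert b"greeting" in sketch_body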
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -12122,20 +13820,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -12143,7 +13844,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12154,19 +13855,25 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -12175,20 +13882,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -12196,7 +13906,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12206,10 +13916,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -12223,9 +13933,7 @@ def test_create_glossary_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: client.create_glossary(request=None) # Establish that the underlying stub method was called. @@ -12245,9 +13953,7 @@ def test_update_glossary_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: client.update_glossary(request=None) # Establish that the underlying stub method was called. @@ -12267,9 +13973,7 @@ def test_delete_glossary_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: client.delete_glossary(request=None) # Establish that the underlying stub method was called. @@ -12289,9 +13993,7 @@ def test_get_glossary_empty_call_rest(): ) # Mock the actual call, and fake the request. 
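# --- Sketch (hypothetical classes): the "empty call" convention verified by
# the *_empty_call_rest hunks below — request=None must still reach the
# transport stub exactly once, carrying the request type's default instance.
from unittest import mock


class SketchRequest:
    def __eq__(self, other):
        return isinstance(other, SketchRequest)


class SketchClient:
    def _stub(self, request):
        pass  # stands in for the transport call

    def create_glossary(self, request=None):
        self._stub(request if request is not None else SketchRequest())


with mock.patch.object(SketchClient, "_stub") as call:
    SketchClient().create_glossary(request=None)
    call.assert_called_once_with(SketchRequest())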
- with mock.patch.object( - type(client.transport.get_glossary), - '__call__') as call: + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: client.get_glossary(request=None) # Establish that the underlying stub method was called. @@ -12311,9 +14013,7 @@ def test_list_glossaries_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_glossaries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: client.list_glossaries(request=None) # Establish that the underlying stub method was called. @@ -12334,8 +14034,8 @@ def test_create_glossary_category_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_category), - '__call__') as call: + type(client.transport.create_glossary_category), "__call__" + ) as call: client.create_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -12356,8 +14056,8 @@ def test_update_glossary_category_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_category), - '__call__') as call: + type(client.transport.update_glossary_category), "__call__" + ) as call: client.update_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -12378,8 +14078,8 @@ def test_delete_glossary_category_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_glossary_category), - '__call__') as call: + type(client.transport.delete_glossary_category), "__call__" + ) as call: client.delete_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -12400,8 +14100,8 @@ def test_get_glossary_category_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_category), - '__call__') as call: + type(client.transport.get_glossary_category), "__call__" + ) as call: client.get_glossary_category(request=None) # Establish that the underlying stub method was called. @@ -12422,8 +14122,8 @@ def test_list_glossary_categories_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_categories), - '__call__') as call: + type(client.transport.list_glossary_categories), "__call__" + ) as call: client.list_glossary_categories(request=None) # Establish that the underlying stub method was called. @@ -12444,8 +14144,8 @@ def test_create_glossary_term_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_glossary_term), - '__call__') as call: + type(client.transport.create_glossary_term), "__call__" + ) as call: client.create_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -12466,8 +14166,8 @@ def test_update_glossary_term_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_glossary_term), - '__call__') as call: + type(client.transport.update_glossary_term), "__call__" + ) as call: client.update_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -12488,8 +14188,8 @@ def test_delete_glossary_term_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.delete_glossary_term), - '__call__') as call: + type(client.transport.delete_glossary_term), "__call__" + ) as call: client.delete_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -12510,8 +14210,8 @@ def test_get_glossary_term_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_glossary_term), - '__call__') as call: + type(client.transport.get_glossary_term), "__call__" + ) as call: client.get_glossary_term(request=None) # Establish that the underlying stub method was called. @@ -12532,8 +14232,8 @@ def test_list_glossary_terms_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_glossary_terms), - '__call__') as call: + type(client.transport.list_glossary_terms), "__call__" + ) as call: client.list_glossary_terms(request=None) # Establish that the underlying stub method was called. @@ -12554,12 +14254,13 @@ def test_business_glossary_service_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = BusinessGlossaryServiceClient( @@ -12570,18 +14271,21 @@ def test_transport_grpc_default(): transports.BusinessGlossaryServiceGrpcTransport, ) + def test_business_glossary_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.BusinessGlossaryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_business_glossary_service_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport.__init__') as Transport: + with mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.BusinessGlossaryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -12590,27 +14294,27 @@ def test_business_glossary_service_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
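# --- Sketch (hypothetical base class): the abstract-transport contract the
# next hunk iterates over — every RPC attribute raises NotImplementedError
# until a concrete transport overrides it.
import pytest


class SketchBaseTransport:
    def _unimplemented(self, *args, **kwargs):
        raise NotImplementedError()

    create_glossary = _unimplemented
    get_glossary = _unimplemented


sketch_transport = SketchBaseTransport()
for sketch_method in ("create_glossary", "get_glossary"):
    with pytest.raises(NotImplementedError):
        getattr(sketch_transport, sketch_method)(request=None)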
methods = ( - 'create_glossary', - 'update_glossary', - 'delete_glossary', - 'get_glossary', - 'list_glossaries', - 'create_glossary_category', - 'update_glossary_category', - 'delete_glossary_category', - 'get_glossary_category', - 'list_glossary_categories', - 'create_glossary_term', - 'update_glossary_term', - 'delete_glossary_term', - 'get_glossary_term', - 'list_glossary_terms', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "create_glossary", + "update_glossary", + "delete_glossary", + "get_glossary", + "list_glossaries", + "create_glossary_category", + "update_glossary_category", + "delete_glossary_category", + "get_glossary_category", + "list_glossary_categories", + "create_glossary_term", + "update_glossary_term", + "delete_glossary_term", + "get_glossary_term", + "list_glossary_terms", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -12626,7 +14330,7 @@ def test_business_glossary_service_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -12635,25 +14339,30 @@ def test_business_glossary_service_base_transport(): def test_business_glossary_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.BusinessGlossaryServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_business_glossary_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.BusinessGlossaryServiceTransport() @@ -12662,14 +14371,12 @@ def test_business_glossary_service_base_transport_with_adc(): def test_business_glossary_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
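# --- Sketch (assumes google-auth is installed; raises
# DefaultCredentialsError when no ambient credentials exist): the ADC lookup
# asserted above — google.auth.default() yields a (credentials, project)
# pair, requested here with the cloud-platform scope the transports use.
import google.auth


def sketch_adc():
    credentials, project = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    return credentials, project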
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) BusinessGlossaryServiceClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -12684,12 +14391,12 @@ def test_business_glossary_service_auth_adc(): def test_business_glossary_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -12703,48 +14410,47 @@ def test_business_glossary_service_transport_auth_adc(transport_class): ], ) def test_business_glossary_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.BusinessGlossaryServiceGrpcTransport, grpc_helpers), - (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, grpc_helpers_async) + (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, grpc_helpers_async), ], ) -def test_business_glossary_service_transport_create_channel(transport_class, grpc_helpers): +def test_business_glossary_service_transport_create_channel( + transport_class, grpc_helpers +): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "dataplex.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="dataplex.googleapis.com", ssl_credentials=None, @@ -12755,9 +14461,15 @@ def test_business_glossary_service_transport_create_channel(transport_class, grp ) -@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) def test_business_glossary_service_grpc_transport_client_cert_source_for_mtls( - transport_class + transport_class, ): cred = ga_credentials.AnonymousCredentials() @@ -12767,7 +14479,7 @@ def test_business_glossary_service_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -12788,61 +14500,77 @@ def test_business_glossary_service_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_business_glossary_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BusinessGlossaryServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BusinessGlossaryServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_business_glossary_service_host_no_port(transport_name): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + 
api_endpoint="dataplex.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'dataplex.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com' + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_business_glossary_service_host_with_port(transport_name): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataplex.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="dataplex.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'dataplex.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataplex.googleapis.com:8000' + "dataplex.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_business_glossary_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -12899,8 +14627,10 @@ def test_business_glossary_service_client_transport_session_collision(transport_ session1 = client1.transport.list_glossary_terms._session session2 = client2.transport.list_glossary_terms._session assert session1 != session2 + + def test_business_glossary_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.BusinessGlossaryServiceGrpcTransport( @@ -12913,7 +14643,7 @@ def test_business_glossary_service_grpc_transport_channel(): def test_business_glossary_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.BusinessGlossaryServiceGrpcAsyncIOTransport( @@ -12927,12 +14657,22 @@ def test_business_glossary_service_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) def test_business_glossary_service_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -12941,7 +14681,7 @@ def test_business_glossary_service_transport_channel_mtls_with_client_cert_sourc cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -12971,17 +14711,23 @@ def test_business_glossary_service_transport_channel_mtls_with_client_cert_sourc # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.BusinessGlossaryServiceGrpcTransport, transports.BusinessGlossaryServiceGrpcAsyncIOTransport]) -def test_business_glossary_service_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) +def test_business_glossary_service_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -13012,7 +14758,7 @@ def test_business_glossary_service_transport_channel_mtls_with_adc( def test_business_glossary_service_grpc_lro_client(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -13029,7 +14775,7 @@ def test_business_glossary_service_grpc_lro_client(): def test_business_glossary_service_grpc_lro_async_client(): client = BusinessGlossaryServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -13047,7 +14793,11 @@ def test_glossary_path(): project = "squid" location = "clam" glossary = "whelk" - expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format(project=project, location=location, glossary=glossary, ) + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) actual = 
BusinessGlossaryServiceClient.glossary_path(project, location, glossary) assert expected == actual @@ -13064,13 +14814,21 @@ def test_parse_glossary_path(): actual = BusinessGlossaryServiceClient.parse_glossary_path(path) assert expected == actual + def test_glossary_category_path(): project = "cuttlefish" location = "mussel" glossary = "winkle" glossary_category = "nautilus" - expected = "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format(project=project, location=location, glossary=glossary, glossary_category=glossary_category, ) - actual = BusinessGlossaryServiceClient.glossary_category_path(project, location, glossary, glossary_category) + expected = "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format( + project=project, + location=location, + glossary=glossary, + glossary_category=glossary_category, + ) + actual = BusinessGlossaryServiceClient.glossary_category_path( + project, location, glossary, glossary_category + ) assert expected == actual @@ -13087,13 +14845,21 @@ def test_parse_glossary_category_path(): actual = BusinessGlossaryServiceClient.parse_glossary_category_path(path) assert expected == actual + def test_glossary_term_path(): project = "whelk" location = "octopus" glossary = "oyster" glossary_term = "nudibranch" - expected = "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format(project=project, location=location, glossary=glossary, glossary_term=glossary_term, ) - actual = BusinessGlossaryServiceClient.glossary_term_path(project, location, glossary, glossary_term) + expected = "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format( + project=project, + location=location, + glossary=glossary, + glossary_term=glossary_term, + ) + actual = BusinessGlossaryServiceClient.glossary_term_path( + project, location, glossary, glossary_term + ) assert expected == actual @@ -13110,9 +14876,12 @@ def test_parse_glossary_term_path(): actual = BusinessGlossaryServiceClient.parse_glossary_term_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = BusinessGlossaryServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -13127,9 +14896,12 @@ def test_parse_common_billing_account_path(): actual = BusinessGlossaryServiceClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = BusinessGlossaryServiceClient.common_folder_path(folder) assert expected == actual @@ -13144,9 +14916,12 @@ def test_parse_common_folder_path(): actual = BusinessGlossaryServiceClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = BusinessGlossaryServiceClient.common_organization_path(organization) assert expected == actual @@ -13161,9 +14936,12 @@ def test_parse_common_organization_path(): actual = 
BusinessGlossaryServiceClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "oyster" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = BusinessGlossaryServiceClient.common_project_path(project) assert expected == actual @@ -13178,10 +14956,14 @@ def test_parse_common_project_path(): actual = BusinessGlossaryServiceClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "cuttlefish" location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = BusinessGlossaryServiceClient.common_location_path(project, location) assert expected == actual @@ -13201,14 +14983,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.BusinessGlossaryServiceTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.BusinessGlossaryServiceTransport, "_prep_wrapped_messages" + ) as prep: client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.BusinessGlossaryServiceTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.BusinessGlossaryServiceTransport, "_prep_wrapped_messages" + ) as prep: transport_class = BusinessGlossaryServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -13219,7 +15005,8 @@ def test_client_with_default_client_info(): def test_delete_operation(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13238,10 +15025,13 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + + @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13251,9 +15041,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13263,6 +15051,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
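# --- Sketch (a minimal reimplementation of the path helpers exercised in
# the *_path hunks above): format the resource template, then parse it back
# with a regular expression.
import re


def sketch_glossary_path(project, location, glossary):
    return "projects/{project}/locations/{location}/glossaries/{glossary}".format(
        project=project, location=location, glossary=glossary
    )


def sketch_parse_glossary_path(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
        r"/glossaries/(?P<glossary>.+?)$",
        path,
    )
    return m.groupdict() if m else {}


assert sketch_parse_glossary_path(sketch_glossary_path("squid", "clam", "whelk")) == {
    "project": "squid",
    "location": "clam",
    "glossary": "whelk",
}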
assert response is None + def test_delete_operation_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13275,7 +15064,7 @@ def test_delete_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.delete_operation(request) # Establish that the underlying gRPC stub method was called. @@ -13285,7 +15074,12 @@ def test_delete_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13299,9 +15093,7 @@ async def test_delete_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13310,7 +15102,11 @@ async def test_delete_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + def test_delete_operation_from_dict(): client = BusinessGlossaryServiceClient( @@ -13327,6 +15123,8 @@ def test_delete_operation_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13335,9 +15133,7 @@ async def test_delete_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation( request={ "name": "locations", @@ -13348,7 +15144,8 @@ async def test_delete_operation_from_dict_async(): def test_cancel_operation(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13367,10 +15164,13 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
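# --- Sketch: the implicit routing header asserted throughout the
# field-header hunks here — the resource name from the request is folded
# into x-goog-request-params metadata that rides along with the call.
def sketch_routing_metadata(field, value):
    return [("x-goog-request-params", "{}={}".format(field, value))]


assert ("x-goog-request-params", "name=locations") in sketch_routing_metadata(
    "name", "locations"
)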
assert response is None + + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13380,9 +15180,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13392,6 +15190,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert response is None + def test_cancel_operation_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13404,7 +15203,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -13414,7 +15213,12 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13428,9 +15232,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13439,7 +15241,11 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + def test_cancel_operation_from_dict(): client = BusinessGlossaryServiceClient( @@ -13456,6 +15262,8 @@ def test_cancel_operation_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13464,9 +15272,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -13477,7 +15283,8 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13496,10 +15303,13 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13521,6 +15331,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + def test_get_operation_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13543,7 +15354,12 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13568,7 +15384,11 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + def test_get_operation_from_dict(): client = BusinessGlossaryServiceClient( @@ -13585,6 +15405,8 @@ def test_get_operation_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13606,7 +15428,8 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13625,10 +15448,13 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13650,6 +15476,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
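# --- Sketch (hypothetical stub class): the gRPC stub mocking used in these
# hunks — patching __call__ on the stub's *type* intercepts the outgoing
# call, because Python looks special methods up on the class, not the
# instance.
from unittest import mock


class SketchStub:
    def __call__(self, request):
        raise RuntimeError("would hit the network")


sketch_stub = SketchStub()
with mock.patch.object(type(sketch_stub), "__call__") as call:
    call.return_value = None
    sketch_result = sketch_stub("my-request")
    call.assert_called_once()
assert sketch_result is None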
assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_list_operations_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13672,7 +15499,12 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13697,7 +15529,11 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + def test_list_operations_from_dict(): client = BusinessGlossaryServiceClient( @@ -13714,6 +15550,8 @@ def test_list_operations_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13735,7 +15573,8 @@ async def test_list_operations_from_dict_async(): def test_list_locations(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13754,10 +15593,13 @@ def test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + + @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13779,6 +15621,7 @@ async def test_list_locations_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + def test_list_locations_field_headers(): client = BusinessGlossaryServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13801,7 +15644,12 @@ def test_list_locations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13826,7 +15674,11 @@ async def test_list_locations_field_headers_async(): # Establish that the field header was sent. 
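# mock_calls[0] unpacks as (name, args, kwargs); the routing header
# travels in kwargs["metadata"].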
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + def test_list_locations_from_dict(): client = BusinessGlossaryServiceClient( @@ -13843,6 +15695,8 @@ def test_list_locations_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13864,7 +15718,8 @@ async def test_list_locations_from_dict_async(): def test_get_location(transport: str = "grpc"): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13883,10 +15738,13 @@ def test_get_location(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + + @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13908,9 +15766,11 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + def test_get_location_field_headers(): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials()) + credentials=ga_credentials.AnonymousCredentials() + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -13929,7 +15789,12 @@ def test_get_location_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13954,7 +15819,11 @@ async def test_get_location_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + def test_get_location_from_dict(): client = BusinessGlossaryServiceClient( @@ -13971,6 +15840,8 @@ def test_get_location_from_dict(): } ) call.assert_called() + + @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = BusinessGlossaryServiceAsyncClient( @@ -13992,10 +15863,11 @@ async def test_get_location_from_dict_async(): def test_transport_close_grpc(): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -14004,10 +15876,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = BusinessGlossaryServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -14015,10 +15888,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -14026,13 +15900,12 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = BusinessGlossaryServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
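# Patching close() on the transport class lets the test confirm that
# exiting the context manager tears the client down exactly once.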
with mock.patch.object(type(client.transport), "close") as close: @@ -14041,10 +15914,20 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (BusinessGlossaryServiceClient, transports.BusinessGlossaryServiceGrpcTransport), - (BusinessGlossaryServiceAsyncClient, transports.BusinessGlossaryServiceGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -14059,7 +15942,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index c08cc9306417..87518b65c369 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -11218,13 +11218,85 @@ async def test_cancel_metadata_job_flattened_error_async(): ) -def test_create_entry_type_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryLinkRequest, + dict, + ], +) +def test_create_entry_link(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + response = client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_create_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
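+ # (parent and entry_link_id below are ordinary strings, so both must
+ # round-trip unchanged on the request.)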
+ request = catalog.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + ) + + +def test_create_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11232,7 +11304,7 @@ def test_create_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_entry_type in client._transport._wrapped_methods + assert client._transport.create_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11240,208 +11312,350 @@ def test_create_entry_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_entry_type + client._transport.create_entry_link ] = mock_rpc - request = {} - client.create_entry_type(request) + client.create_entry_link(request) # Establish that the underlying gRPC stub method was called. 
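# The mock stands in for the cached wrapper, so call_count counts RPC
# invocations directly.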
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_type(request) + client.create_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_type_rest_required_fields( - request_type=catalog.CreateEntryTypeRequest, +@pytest.mark.asyncio +async def test_create_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["entry_type_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped - assert "entryTypeId" not in jsonified_request + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.create_entry_link + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entry_link + ] = mock_rpc - jsonified_request["parent"] = "parent_value" - jsonified_request["entryTypeId"] = "entry_type_id_value" + request = {} + await client.create_entry_link(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "entry_type_id", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == "entry_type_id_value" + await client.create_entry_link(request) - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_create_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.CreateEntryLinkRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.create_entry_type(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.create_entry_link(request) - expected_params = [ - ( - "entryTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryLinkRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" -def test_create_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.create_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "entryTypeId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "entryTypeId", - "entryType", - ) - ) - ) +@pytest.mark.asyncio +async def test_create_entry_link_async_from_dict(): + await test_create_entry_link_async(request_type=dict) -def test_create_entry_type_rest_flattened(): +def test_create_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryLinkRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - entry_type=catalog.EntryType(name="name_value"), - entry_type_id="entry_type_id_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.create_entry_link(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_entry_type(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_entry_link_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry_link( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entry_link + mock_val = catalog.EntryLink(name="name_value") + assert arg == mock_val + arg = args[0].entry_link_id + mock_val = "entry_link_id_value" + assert arg == mock_val -def test_create_entry_type_rest_flattened_error(transport: str = "rest"): +def test_create_entry_link_flattened_error(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_entry_type( - catalog.CreateEntryTypeRequest(), + client.create_entry_link( + catalog.CreateEntryLinkRequest(), parent="parent_value", - entry_type=catalog.EntryType(name="name_value"), - entry_type_id="entry_type_id_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) -def test_update_entry_type_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry_link( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entry_link + mock_val = catalog.EntryLink(name="name_value") + assert arg == mock_val + arg = args[0].entry_link_id + mock_val = "entry_link_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_entry_link( + catalog.CreateEntryLinkRequest(), + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.DeleteEntryLinkRequest, + dict, + ], +) +def test_delete_entry_link(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
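# Patching __call__ on the bound stub method keeps the gRPC channel
# itself untouched.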
+ with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + response = client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_delete_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.DeleteEntryLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryLinkRequest( + name="name_value", + ) + + +def test_delete_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11449,7 +11663,7 @@ def test_update_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_entry_type in client._transport._wrapped_methods + assert client._transport.delete_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11457,192 +11671,326 @@ def test_update_entry_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_entry_type + client._transport.delete_entry_link ] = mock_rpc - request = {} - client.update_entry_type(request) + client.delete_entry_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + client.delete_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - client.update_entry_type(request) + # Ensure method has been cached + assert ( + client._client._transport.delete_entry_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entry_link + ] = mock_rpc + + request = {} + await client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_type_rest_required_fields( - request_type=catalog.UpdateEntryTypeRequest, +@pytest.mark.asyncio +async def test_delete_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryLinkRequest ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.delete_entry_link(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryLinkRequest() + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "update_mask", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +@pytest.mark.asyncio +async def test_delete_entry_link_async_from_dict(): + await test_delete_entry_link_async(request_type=dict) - # verify required fields with non-default values are left alone +def test_delete_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryLinkRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.delete_entry_link(request) - response = client.update_entry_type(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.update_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "entryType", - "updateMask", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = catalog.DeleteEntryLinkRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.delete_entry_link(request) -def test_update_entry_type_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_entry_link_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_link( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "entry_type": { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" - } - } + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - entry_type=catalog.EntryType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + +def test_delete_entry_link_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_type(**mock_args) +@pytest.mark.asyncio +async def test_delete_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
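+ # DeleteEntryLink returns the deleted EntryLink itself rather than an
+ # Empty message.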
+ call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry_link( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) -def test_update_entry_type_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetEntryLinkRequest, + dict, + ], +) +def test_get_entry_link(request_type, transport: str = "grpc"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", ) + response = client.get_entry_link(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryLinkRequest() + assert args[0] == request -def test_delete_entry_type_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_get_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryLinkRequest( + name="name_value", + ) + + +def test_get_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11650,175 +11998,250 @@ def test_delete_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_entry_type in client._transport._wrapped_methods + assert client._transport.get_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entry_type - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc request = {} - client.delete_entry_type(request) + client.get_entry_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_type(request) + client.get_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_type_rest_required_fields( - request_type=catalog.DeleteEntryTypeRequest, +@pytest.mark.asyncio +async def test_get_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.get_entry_link + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entry_link + ] = 
mock_rpc - jsonified_request["name"] = "name_value" + request = {} + await client.get_entry_link(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.GetEntryLinkRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.get_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +@pytest.mark.asyncio +async def test_get_entry_link_async_from_dict(): + await test_get_entry_link_async(request_type=dict) + + +def test_get_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = catalog.GetEntryLinkRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = catalog.EntryLink() + client.get_entry_link(request) - response = client.delete_entry_type(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_delete_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetEntryLinkRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.get_entry_link(request) -def test_delete_entry_type_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_entry_link_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entry_link( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. 
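# Each flattened keyword argument must have been copied onto the
# request message verbatim.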
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_get_entry_link_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_link( + catalog.GetEntryLinkRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_type(**mock_args) +@pytest.mark.asyncio +async def test_get_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry_link( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_delete_entry_type_rest_flattened_error(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), + await client.get_entry_link( + catalog.GetEntryLinkRequest(), name="name_value", ) -def test_list_entry_types_rest_use_cached_wrapped_rpc(): +def test_create_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11832,7 +12255,7 @@ def test_list_entry_types_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entry_types in client._transport._wrapped_methods + assert client._transport.create_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11840,29 +12263,34 @@ def test_list_entry_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_entry_types + client._transport.create_entry_type ] = mock_rpc request = {} - client.list_entry_types(request) + client.create_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_entry_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entry_types_rest_required_fields( - request_type=catalog.ListEntryTypesRequest, +def test_create_entry_type_rest_required_fields( + request_type=catalog.CreateEntryTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11870,26 +12298,28 @@ def test_list_entry_types_rest_required_fields( ) # verify fields with default values are dropped + assert "entryTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_types._get_unset_required_fields(jsonified_request) + ).create_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryTypeId"] = "entry_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_types._get_unset_required_fields(jsonified_request) + ).create_entry_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "entry_type_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -11897,6 +12327,8 @@ def test_list_entry_types_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == "entry_type_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11905,7 +12337,7 @@ def test_list_entry_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11917,49 +12349,57 @@ def test_list_entry_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_types(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - + response = client.create_entry_type(request) -def test_list_entry_types_rest_unset_required_fields(): + expected_params = [ + ( + "entryTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entry_types._get_unset_required_fields({}) + unset_fields = transport.create_entry_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "entryTypeId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "entryTypeId", + "entryType", ) ) - & set(("parent",)) ) -def test_list_entry_types_rest_flattened(): +def test_create_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11968,7 +12408,7 @@ def test_list_entry_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -11976,20 +12416,20 @@ def test_list_entry_types_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry_type=catalog.EntryType(name="name_value"), + entry_type_id="entry_type_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_types(**mock_args) + client.create_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -12001,7 +12441,7 @@ def test_list_entry_types_rest_flattened(): ) -def test_list_entry_types_rest_flattened_error(transport: str = "rest"): +def test_create_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12010,74 +12450,15 @@ def test_list_entry_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_entry_types( - catalog.ListEntryTypesRequest(), + client.create_entry_type( + catalog.CreateEntryTypeRequest(), parent="parent_value", + entry_type=catalog.EntryType(name="name_value"), + entry_type_id="entry_type_id_value", ) -def test_list_entry_types_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token="abc", - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token="def", - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token="ghi", - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_entry_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryType) for i in results) - - pages = list(client.list_entry_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_type_rest_use_cached_wrapped_rpc(): +def test_update_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12091,33 +12472,40 @@ def test_get_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry_type in client._transport._wrapped_methods + assert client._transport.update_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_entry_type + ] = mock_rpc request = {} - client.get_entry_type(request) + client.update_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_entry_type(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest): +def test_update_entry_type_rest_required_fields( + request_type=catalog.UpdateEntryTypeRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12128,21 +12516,24 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_type._get_unset_required_fields(jsonified_request) + ).update_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_type._get_unset_required_fields(jsonified_request) + ).update_entry_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12151,7 +12542,7 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.EntryType() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12163,39 +12554,50 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_type(request) + response = client.update_entry_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_type_rest_unset_required_fields(): +def test_update_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "entryType", + "updateMask", + ) + ) + ) -def test_get_entry_type_rest_flattened(): +def test_update_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12204,42 +12606,44 @@ def test_get_entry_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryType() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" + "entry_type": { + "name": "projects/sample1/locations/sample2/entryTypes/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry_type=catalog.EntryType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_type(**mock_args) + client.update_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, + "%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" + % client.transport._host, args[1], ) -def test_get_entry_type_rest_flattened_error(transport: str = "rest"): +def test_update_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12248,13 +12652,14 @@ def test_get_entry_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_entry_type( - catalog.GetEntryTypeRequest(), - name="name_value", + client.update_entry_type( + catalog.UpdateEntryTypeRequest(), + entry_type=catalog.EntryType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_aspect_type_rest_use_cached_wrapped_rpc(): +def test_delete_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12268,9 +12673,7 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.delete_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12278,11 +12681,11 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_aspect_type + client._transport.delete_entry_type ] = mock_rpc request = {} - client.create_aspect_type(request) + client.delete_entry_type(request) # Establish that the underlying gRPC stub method was called. 
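+ # (The cached wrapped rpc was replaced with mock_rpc above, so this call + # exercises only the caching layer and never reaches a real transport.)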
assert mock_rpc.call_count == 1 @@ -12291,21 +12694,20 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_aspect_type(request) + client.delete_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_aspect_type_rest_required_fields( - request_type=catalog.CreateAspectTypeRequest, +def test_delete_entry_type_rest_required_fields( + request_type=catalog.DeleteEntryTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["aspect_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12313,37 +12715,26 @@ def test_create_aspect_type_rest_required_fields( ) # verify fields with default values are dropped - assert "aspectTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aspect_type._get_unset_required_fields(jsonified_request) + ).delete_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "aspectTypeId" in jsonified_request - assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["aspectTypeId"] = "aspect_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aspect_type._get_unset_required_fields(jsonified_request) + ).delete_entry_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "aspect_type_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "aspectTypeId" in jsonified_request - assert jsonified_request["aspectTypeId"] == "aspect_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12364,10 +12755,9 @@ def test_create_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -12378,43 +12768,23 @@ def test_create_aspect_type_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_aspect_type(request) + response = client.delete_entry_type(request) - expected_params = [ - ( - "aspectTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_aspect_type_rest_unset_required_fields(): +def test_delete_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_aspect_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "aspectTypeId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "aspectTypeId", - "aspectType", - ) - ) - ) + unset_fields = transport.delete_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_aspect_type_rest_flattened(): +def test_delete_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12426,13 +12796,13 @@ def test_create_aspect_type_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/entryTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - aspect_type=catalog.AspectType(name="name_value"), - aspect_type_id="aspect_type_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -12444,20 +12814,19 @@ def test_create_aspect_type_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_aspect_type(**mock_args) + client.delete_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/aspectTypes" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1], ) -def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_delete_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12466,15 +12835,13 @@ def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_aspect_type( - catalog.CreateAspectTypeRequest(), - parent="parent_value", - aspect_type=catalog.AspectType(name="name_value"), - aspect_type_id="aspect_type_id_value", + client.delete_entry_type( + catalog.DeleteEntryTypeRequest(), + name="name_value", ) -def test_update_aspect_type_rest_use_cached_wrapped_rpc(): +def test_list_entry_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12488,9 +12855,7 @@ def test_update_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.list_entry_types in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12498,32 +12863,29 @@ def test_update_aspect_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_aspect_type + client._transport.list_entry_types ] = mock_rpc request = {} - client.update_aspect_type(request) + client.list_entry_types(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_aspect_type(request) + client.list_entry_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_aspect_type_rest_required_fields( - request_type=catalog.UpdateAspectTypeRequest, +def test_list_entry_types_rest_required_fields( + request_type=catalog.ListEntryTypesRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12534,24 +12896,30 @@ def test_update_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aspect_type._get_unset_required_fields(jsonified_request) + ).list_entry_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aspect_type._get_unset_required_fields(jsonified_request) + ).list_entry_types._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12560,7 +12928,7 @@ def test_update_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListEntryTypesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12572,50 +12940,49 @@ def test_update_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_aspect_type(request) + response = client.list_entry_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_aspect_type_rest_unset_required_fields(): +def test_list_entry_types_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_aspect_type._get_unset_required_fields({}) + unset_fields = transport.list_entry_types._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "aspectType", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_aspect_type_rest_flattened(): +def test_list_entry_types_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12624,44 +12991,40 @@ def test_update_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListEntryTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - aspect_type=catalog.AspectType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_aspect_type(**mock_args) + client.list_entry_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
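+ # (For the REST transport, args[1] is the transcoded request URI, so the + # path_template check verifies the flattened args hit the expected http rule.)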
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1], ) -def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_list_entry_types_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12670,14 +13033,74 @@ def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_aspect_type( - catalog.UpdateAspectTypeRequest(), - aspect_type=catalog.AspectType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_entry_types( + catalog.ListEntryTypesRequest(), + parent="parent_value", ) -def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): +def test_list_entry_types_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token="abc", + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token="def", + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token="ghi", + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entry_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryType) for i in results) + + pages = list(client.list_entry_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12691,39 +13114,29 @@ def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.get_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_aspect_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc request = {} - client.delete_aspect_type(request) + client.get_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_aspect_type(request) + client.get_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_aspect_type_rest_required_fields( - request_type=catalog.DeleteAspectTypeRequest, -): +def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} @@ -12738,7 +13151,7 @@ def test_delete_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aspect_type._get_unset_required_fields(jsonified_request) + ).get_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12747,9 +13160,7 @@ def test_delete_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aspect_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) + ).get_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12763,7 +13174,7 @@ def test_delete_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryType() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12775,36 +13186,39 @@ def test_delete_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_aspect_type(request) + response = client.get_entry_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_aspect_type_rest_unset_required_fields(): +def test_get_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_aspect_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + unset_fields = transport.get_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_aspect_type_rest_flattened(): +def test_get_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12813,11 +13227,11 @@ def test_delete_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryType() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + "name": "projects/sample1/locations/sample2/entryTypes/sample3" } # get truthy value for each flattened field @@ -12829,25 +13243,26 @@ def test_delete_aspect_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_aspect_type(**mock_args) + client.get_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1], ) -def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_get_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12856,13 +13271,13 @@ def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_aspect_type( - catalog.DeleteAspectTypeRequest(), - name="name_value", + client.get_entry_type( + catalog.GetEntryTypeRequest(), + name="name_value", ) -def test_list_aspect_types_rest_use_cached_wrapped_rpc(): +def test_create_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12876,7 +13291,9 @@ def test_list_aspect_types_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_aspect_types in client._transport._wrapped_methods + assert ( + client._transport.create_aspect_type in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12884,29 +13301,34 @@ def test_list_aspect_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_aspect_types + client._transport.create_aspect_type ] = mock_rpc request = {} - client.list_aspect_types(request) + client.create_aspect_type(request) # Establish that the underlying gRPC stub method was called. 
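+# NOTE: the *_flattened_error tests above encode a client-wide rule: a request +# object and flattened keyword arguments are mutually exclusive, e.g. +# client.get_entry_type(catalog.GetEntryTypeRequest(), name="name_value") +# raises ValueError before any transport request is issued.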
assert mock_rpc.call_count == 1 - client.list_aspect_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_aspect_types_rest_required_fields( - request_type=catalog.ListAspectTypesRequest, +def test_create_aspect_type_rest_required_fields( + request_type=catalog.CreateAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["aspect_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12914,26 +13336,28 @@ def test_list_aspect_types_rest_required_fields( ) # verify fields with default values are dropped + assert "aspectTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aspect_types._get_unset_required_fields(jsonified_request) + ).create_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["aspectTypeId"] = "aspect_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aspect_types._get_unset_required_fields(jsonified_request) + ).create_aspect_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "aspect_type_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -12941,6 +13365,8 @@ def test_list_aspect_types_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == "aspect_type_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12949,7 +13375,7 @@ def test_list_aspect_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12961,49 +13387,57 @@ def test_list_aspect_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_aspect_types(request) + response = client.create_aspect_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "aspectTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_aspect_types_rest_unset_required_fields(): +def test_create_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_aspect_types._get_unset_required_fields({}) + unset_fields = transport.create_aspect_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "aspectTypeId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "aspectTypeId", + "aspectType", ) ) - & set(("parent",)) ) -def test_list_aspect_types_rest_flattened(): +def test_create_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13012,7 +13446,7 @@ def test_list_aspect_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13020,20 +13454,20 @@ def test_list_aspect_types_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + aspect_type=catalog.AspectType(name="name_value"), + aspect_type_id="aspect_type_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_aspect_types(**mock_args) + client.create_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -13046,7 +13480,7 @@ def test_list_aspect_types_rest_flattened(): ) -def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): +def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13055,74 +13489,15 @@ def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_aspect_types( - catalog.ListAspectTypesRequest(), + client.create_aspect_type( + catalog.CreateAspectTypeRequest(), parent="parent_value", + aspect_type=catalog.AspectType(name="name_value"), + aspect_type_id="aspect_type_id_value", ) -def test_list_aspect_types_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token="abc", - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token="def", - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token="ghi", - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_aspect_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.AspectType) for i in results) - - pages = list(client.list_aspect_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_aspect_type_rest_use_cached_wrapped_rpc(): +def test_update_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13136,35 +13511,42 @@ def test_get_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_aspect_type in client._transport._wrapped_methods + assert ( + client._transport.update_aspect_type in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_aspect_type + ] = mock_rpc request = {} - client.get_aspect_type(request) + client.update_aspect_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_aspect_type(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_aspect_type_rest_required_fields( - request_type=catalog.GetAspectTypeRequest, +def test_update_aspect_type_rest_required_fields( + request_type=catalog.UpdateAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13175,21 +13557,24 @@ def test_get_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aspect_type._get_unset_required_fields(jsonified_request) + ).update_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aspect_type._get_unset_required_fields(jsonified_request) + ).update_aspect_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13198,7 +13583,7 @@ def test_get_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.AspectType() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13210,39 +13595,50 @@ def test_get_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_aspect_type(request) + response = client.update_aspect_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_aspect_type_rest_unset_required_fields(): +def test_update_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_aspect_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "aspectType", + "updateMask", + ) + ) + ) -def test_get_aspect_type_rest_flattened(): +def test_update_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13251,43 +13647,44 @@ def test_get_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.AspectType() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + "aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + aspect_type=catalog.AspectType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_aspect_type(**mock_args) + client.update_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" + "%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13296,13 +13693,14 @@ def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_aspect_type( - catalog.GetAspectTypeRequest(), - name="name_value", + client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_entry_group_rest_use_cached_wrapped_rpc(): +def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13317,7 +13715,7 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_entry_group in client._transport._wrapped_methods + client._transport.delete_aspect_type in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13326,11 +13724,11 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_entry_group + client._transport.delete_aspect_type ] = mock_rpc request = {} - client.create_entry_group(request) + client.delete_aspect_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -13339,21 +13737,20 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_entry_group(request) + client.delete_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_group_rest_required_fields( - request_type=catalog.CreateEntryGroupRequest, +def test_delete_aspect_type_rest_required_fields( + request_type=catalog.DeleteAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["entry_group_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13361,37 +13758,26 @@ def test_create_entry_group_rest_required_fields( ) # verify fields with default values are dropped - assert "entryGroupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry_group._get_unset_required_fields(jsonified_request) + ).delete_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entryGroupId" in jsonified_request - assert jsonified_request["entryGroupId"] == request_init["entry_group_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["entryGroupId"] = "entry_group_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry_group._get_unset_required_fields(jsonified_request) + ).delete_aspect_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "entry_group_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryGroupId" in jsonified_request - assert jsonified_request["entryGroupId"] == "entry_group_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13412,10 +13798,9 @@ def test_create_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13426,43 +13811,23 @@ def test_create_entry_group_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_group(request) + response = client.delete_aspect_type(request) - expected_params = [ - ( - "entryGroupId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_entry_group_rest_unset_required_fields(): +def test_delete_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "entryGroupId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "entryGroupId", - "entryGroup", - ) - ) - ) + unset_fields = transport.delete_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_entry_group_rest_flattened(): +def test_delete_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13474,13 +13839,13 @@ def test_create_entry_group_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - entry_group=catalog.EntryGroup(name="name_value"), - entry_group_id="entry_group_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -13492,20 +13857,20 @@ def test_create_entry_group_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_group(**mock_args) + client.delete_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/entryGroups" + "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_create_entry_group_rest_flattened_error(transport: str = "rest"): +def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13514,15 +13879,13 @@ def test_create_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_entry_group( - catalog.CreateEntryGroupRequest(), - parent="parent_value", - entry_group=catalog.EntryGroup(name="name_value"), - entry_group_id="entry_group_id_value", + client.delete_aspect_type( + catalog.DeleteAspectTypeRequest(), + name="name_value", ) -def test_update_entry_group_rest_use_cached_wrapped_rpc(): +def test_list_aspect_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13536,9 +13899,7 @@ def test_update_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_entry_group in client._transport._wrapped_methods - ) + assert client._transport.list_aspect_types in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -13546,32 +13907,29 @@ def test_update_entry_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_entry_group + client._transport.list_aspect_types ] = mock_rpc request = {} - client.update_entry_group(request) + client.list_aspect_types(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_group(request) + client.list_aspect_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_group_rest_required_fields( - request_type=catalog.UpdateEntryGroupRequest, +def test_list_aspect_types_rest_required_fields( + request_type=catalog.ListAspectTypesRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13582,24 +13940,30 @@ def test_update_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry_group._get_unset_required_fields(jsonified_request) + ).list_aspect_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry_group._get_unset_required_fields(jsonified_request) + ).list_aspect_types._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13608,7 +13972,7 @@ def test_update_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListAspectTypesResponse() # Mock the http request call within the method and fake a response. 
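# Note on the hunk above: list_aspect_types returns a plain response rather
# than an Operation, so its cached-wrapper test drops the
# wrapper_fn.reset_mock() step that the update/create/delete variants keep.
# Operation-returning methods wrap one extra helper lazily on the first call,
# and the reset keeps that one-time wrap from being miscounted as per-call
# re-wrapping.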
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13620,50 +13984,49 @@ def test_update_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_group(request) + response = client.list_aspect_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_entry_group_rest_unset_required_fields(): +def test_list_aspect_types_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_entry_group._get_unset_required_fields({}) + unset_fields = transport.list_aspect_types._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "entryGroup", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_entry_group_rest_flattened(): +def test_list_aspect_types_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13672,44 +14035,41 @@ def test_update_entry_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListAspectTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - entry_group=catalog.EntryGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_group(**mock_args) + client.list_aspect_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
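Every `*_rest_required_fields` and `*_rest_flattened` test fakes the HTTP layer the same way: serialize the designated return value to JSON and hand it back as a canned `requests.Response`. A self-contained sketch of that plumbing, using `ListAspectTypesResponse` as in the hunk above:

from requests import Response

from google.protobuf import json_format
from google.cloud.dataplex_v1.types import catalog

return_value = catalog.ListAspectTypesResponse()
response_value = Response()
response_value.status_code = 200
response_value._content = json_format.MessageToJson(
    catalog.ListAspectTypesResponse.pb(return_value)  # raw protobuf for MessageToJson
).encode("UTF-8")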
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1], ) -def test_update_entry_group_rest_flattened_error(transport: str = "rest"): +def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13718,14 +14078,74 @@ def test_update_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_entry_group( - catalog.UpdateEntryGroupRequest(), - entry_group=catalog.EntryGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_aspect_types( + catalog.ListAspectTypesRequest(), + parent="parent_value", ) -def test_delete_entry_group_rest_use_cached_wrapped_rpc(): +def test_list_aspect_types_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token="abc", + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token="def", + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token="ghi", + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_aspect_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.AspectType) for i in results) + + pages = list(client.list_aspect_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13739,38 +14159,30 @@ def test_delete_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_entry_group in client._transport._wrapped_methods - ) + assert client._transport.get_aspect_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
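A minimal in-memory sketch of what `test_list_aspect_types_rest_pager` asserts: the pager flattens items across pages, and an empty `next_page_token` (the proto3 default) marks the last page. The message construction below mirrors the mocked responses in that test:

from google.cloud.dataplex_v1.types import catalog

pages = [
    catalog.ListAspectTypesResponse(
        aspect_types=[catalog.AspectType(), catalog.AspectType()],
        next_page_token="abc",
    ),
    catalog.ListAspectTypesResponse(aspect_types=[catalog.AspectType()]),
]
items = [item for page in pages for item in page.aspect_types]
assert len(items) == 3
assert pages[-1].next_page_token == ""  # proto3 default marks the last page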
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entry_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc request = {} - client.delete_entry_group(request) + client.get_aspect_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_group(request) + client.get_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_group_rest_required_fields( - request_type=catalog.DeleteEntryGroupRequest, +def test_get_aspect_type_rest_required_fields( + request_type=catalog.GetAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport @@ -13786,7 +14198,7 @@ def test_delete_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_group._get_unset_required_fields(jsonified_request) + ).get_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13795,9 +14207,7 @@ def test_delete_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_group._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) + ).get_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13811,7 +14221,7 @@ def test_delete_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13823,36 +14233,39 @@ def test_delete_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_group(request) + response = client.get_aspect_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_entry_group_rest_unset_required_fields(): +def test_get_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + unset_fields = transport.get_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_entry_group_rest_flattened(): +def test_get_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13861,11 +14274,11 @@ def test_delete_entry_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" } # get truthy value for each flattened field @@ -13877,25 +14290,27 @@ def test_delete_entry_group_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_group(**mock_args) + client.get_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): +def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13904,13 +14319,13 @@ def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_entry_group( - catalog.DeleteEntryGroupRequest(), + client.get_aspect_type( + catalog.GetAspectTypeRequest(), name="name_value", ) -def test_list_entry_groups_rest_use_cached_wrapped_rpc(): +def test_create_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13924,7 +14339,9 @@ def test_list_entry_groups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entry_groups in client._transport._wrapped_methods + assert ( + client._transport.create_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -13932,29 +14349,34 @@ def test_list_entry_groups_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_entry_groups + client._transport.create_entry_group ] = mock_rpc request = {} - client.list_entry_groups(request) + client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. 
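The flattened tests validate the transcoded URI against the method's HTTP-rule template via `path_template.validate`. A standalone check of the same kind (the host below is illustrative, not the real service endpoint):

from google.api_core import path_template

assert path_template.validate(
    "https://dataplex.example/v1/{name=projects/*/locations/*/aspectTypes/*}",
    "https://dataplex.example/v1/projects/p1/locations/l1/aspectTypes/a1",
)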
assert mock_rpc.call_count == 1 - client.list_entry_groups(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entry_groups_rest_required_fields( - request_type=catalog.ListEntryGroupsRequest, +def test_create_entry_group_rest_required_fields( + request_type=catalog.CreateEntryGroupRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_group_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13962,26 +14384,28 @@ def test_list_entry_groups_rest_required_fields( ) # verify fields with default values are dropped + assert "entryGroupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_groups._get_unset_required_fields(jsonified_request) + ).create_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == request_init["entry_group_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryGroupId"] = "entry_group_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_groups._get_unset_required_fields(jsonified_request) + ).create_entry_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "entry_group_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -13989,6 +14413,8 @@ def test_list_entry_groups_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == "entry_group_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13997,7 +14423,7 @@ def test_list_entry_groups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14009,49 +14435,57 @@ def test_list_entry_groups_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_groups(request) + response = client.create_entry_group(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entryGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_entry_groups_rest_unset_required_fields(): +def test_create_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entry_groups._get_unset_required_fields({}) + unset_fields = transport.create_entry_group._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "entryGroupId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "entryGroupId", + "entryGroup", ) ) - & set(("parent",)) ) -def test_list_entry_groups_rest_flattened(): +def test_create_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14060,7 +14494,7 @@ def test_list_entry_groups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -14068,20 +14502,20 @@ def test_list_entry_groups_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry_group=catalog.EntryGroup(name="name_value"), + entry_group_id="entry_group_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_groups(**mock_args) + client.create_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
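The `("entryGroupId", "")` entry in `expected_params` above falls out of proto3 semantics: `entry_group_id` is a required query parameter, so it is sent on the wire even while it still holds the empty-string default, alongside the transport's constant `("$alt", "json;enum-encoding=int")` parameter. A quick confirmation of the default:

from google.cloud.dataplex_v1.types import catalog

request = catalog.CreateEntryGroupRequest()
assert request.entry_group_id == ""  # proto3 default, still emitted as a query param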
@@ -14094,7 +14528,7 @@ def test_list_entry_groups_rest_flattened(): ) -def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): +def test_create_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14103,74 +14537,15 @@ def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_entry_groups( - catalog.ListEntryGroupsRequest(), + client.create_entry_group( + catalog.CreateEntryGroupRequest(), parent="parent_value", + entry_group=catalog.EntryGroup(name="name_value"), + entry_group_id="entry_group_id_value", ) -def test_list_entry_groups_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token="abc", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token="def", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token="ghi", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_entry_groups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryGroup) for i in results) - - pages = list(client.list_entry_groups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_group_rest_use_cached_wrapped_rpc(): +def test_update_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14184,35 +14559,42 @@ def test_get_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry_group in client._transport._wrapped_methods + assert ( + client._transport.update_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_entry_group + ] = mock_rpc request = {} - client.get_entry_group(request) + client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_entry_group(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_group_rest_required_fields( - request_type=catalog.GetEntryGroupRequest, +def test_update_entry_group_rest_required_fields( + request_type=catalog.UpdateEntryGroupRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14223,21 +14605,24 @@ def test_get_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_group._get_unset_required_fields(jsonified_request) + ).update_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_group._get_unset_required_fields(jsonified_request) + ).update_entry_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14246,7 +14631,7 @@ def test_get_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14258,84 +14643,96 @@ def test_get_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_group(request) + response = client.update_entry_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_group_rest_unset_required_fields(): +def test_update_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_entry_group_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + unset_fields = transport.update_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "entryGroup", + "updateMask", + ) + ) + ) + + +def test_update_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry_group=catalog.EntryGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_group(**mock_args) + client.update_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_get_entry_group_rest_flattened_error(transport: str = "rest"): +def test_update_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14344,13 +14741,14 @@ def test_get_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_entry_group( - catalog.GetEntryGroupRequest(), - name="name_value", + client.update_entry_group( + catalog.UpdateEntryGroupRequest(), + entry_group=catalog.EntryGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_entry_rest_use_cached_wrapped_rpc(): +def test_delete_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14364,34 +14762,43 @@ def test_create_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_entry in client._transport._wrapped_methods + assert ( + client._transport.delete_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_entry_group + ] = mock_rpc request = {} - client.create_entry(request) + client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. 
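The `*_flattened_error` tests all reduce to one client-side guard: a request object and flattened fields cannot be mixed. A self-contained sketch, assuming the packaged import surface (no HTTP traffic is attempted before the raise):

import pytest

from google.auth import credentials as ga_credentials
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
with pytest.raises(ValueError):
    client.get_entry_group(
        dataplex_v1.GetEntryGroupRequest(), name="name_value"
    )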
assert mock_rpc.call_count == 1 - client.create_entry(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): +def test_delete_entry_group_rest_required_fields( + request_type=catalog.DeleteEntryGroupRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["entry_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14399,32 +14806,26 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque ) # verify fields with default values are dropped - assert "entryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry._get_unset_required_fields(jsonified_request) + ).delete_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == request_init["entry_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["entryId"] = "entry_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry._get_unset_required_fields(jsonified_request) + ).delete_entry_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("entry_id",)) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == "entry_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14433,7 +14834,7 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14445,55 +14846,36 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry(request) + response = client.delete_entry_group(request) - expected_params = [ - ( - "entryId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_entry_rest_unset_required_fields(): +def test_delete_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_entry._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("entryId",)) - & set( - ( - "parent", - "entryId", - "entry", - ) - ) - ) + unset_fields = transport.delete_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_entry_rest_flattened(): +def test_delete_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14502,45 +14884,41 @@ def test_create_entry_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + "name": "projects/sample1/locations/sample2/entryGroups/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - entry=catalog.Entry(name="name_value"), - entry_id="entry_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry(**mock_args) + client.delete_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
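In the DELETE variants above, the faked transcode result deliberately omits the `"body"` key that the POST/PATCH tests attach, since body-less verbs transcode without one. The shape, for reference (the empty dict stands in for the request protobuf the tests pass):

transcode_result = {
    "uri": "v1/sample_method",
    "method": "delete",
    "query_params": {},  # placeholder for the pb_request used in the tests
}
# (no transcode_result["body"] for GET/DELETE methods)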
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" + "%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_create_entry_rest_flattened_error(transport: str = "rest"): +def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14549,15 +14927,13 @@ def test_create_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_entry( - catalog.CreateEntryRequest(), - parent="parent_value", - entry=catalog.Entry(name="name_value"), - entry_id="entry_id_value", + client.delete_entry_group( + catalog.DeleteEntryGroupRequest(), + name="name_value", ) -def test_update_entry_rest_use_cached_wrapped_rpc(): +def test_list_entry_groups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14571,32 +14947,37 @@ def test_update_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_entry in client._transport._wrapped_methods + assert client._transport.list_entry_groups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_entry_groups + ] = mock_rpc request = {} - client.update_entry(request) + client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_entry(request) + client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): +def test_list_entry_groups_rest_required_fields( + request_type=catalog.ListEntryGroupsRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14607,26 +14988,30 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry._get_unset_required_fields(jsonified_request) + ).list_entry_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry._get_unset_required_fields(jsonified_request) + ).list_entry_groups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "aspect_keys", - "delete_missing_aspects", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14635,7 +15020,7 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.ListEntryGroupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14647,50 +15032,49 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry(request) + response = client.list_entry_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_entry_rest_unset_required_fields(): +def test_list_entry_groups_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_entry._get_unset_required_fields({}) + unset_fields = transport.list_entry_groups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "aspectKeys", - "deleteMissingAspects", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - & set(("entry",)) + & set(("parent",)) ) -def test_update_entry_rest_flattened(): +def test_list_entry_groups_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14699,19 +15083,14 @@ def test_update_entry_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.Entry() + return_value = catalog.ListEntryGroupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "entry": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - entry=catalog.Entry(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -14719,26 +15098,26 @@ def test_update_entry_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry(**mock_args) + client.list_entry_groups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1], ) -def test_update_entry_rest_flattened_error(transport: str = "rest"): +def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14747,50 +15126,112 @@ def test_update_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_entry( - catalog.UpdateEntryRequest(), - entry=catalog.Entry(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_delete_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client.list_entry_groups( + catalog.ListEntryGroupsRequest(), + parent="parent_value", ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert client._transport.delete_entry in client._transport._wrapped_methods +def test_list_entry_groups_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token="abc", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token="def", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token="ghi", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), ) - client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entry_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryGroup) for i in results) + + pages = list(client.list_entry_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc request = {} - client.delete_entry(request) + client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_entry(request) + client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): +def test_get_entry_group_rest_required_fields( + request_type=catalog.GetEntryGroupRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} @@ -14805,7 +15246,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry._get_unset_required_fields(jsonified_request) + ).get_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14814,7 +15255,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry._get_unset_required_fields(jsonified_request) + ).get_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14828,7 +15269,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.EntryGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14840,7 +15281,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -14849,30 +15290,30 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry(request) + response = client.get_entry_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_entry_rest_unset_required_fields(): +def test_get_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_entry._get_unset_required_fields({}) + unset_fields = transport.get_entry_group._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_entry_rest_flattened(): +def test_get_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14881,11 +15322,11 @@ def test_delete_entry_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.EntryGroup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + "name": "projects/sample1/locations/sample2/entryGroups/sample3" } # get truthy value for each flattened field @@ -14898,26 +15339,26 @@ def test_delete_entry_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry(**mock_args) + client.get_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_delete_entry_rest_flattened_error(transport: str = "rest"): +def test_get_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14926,13 +15367,13 @@ def test_delete_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_entry( - catalog.DeleteEntryRequest(), + client.get_entry_group( + catalog.GetEntryGroupRequest(), name="name_value", ) -def test_list_entries_rest_use_cached_wrapped_rpc(): +def test_create_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14946,33 +15387,34 @@ def test_list_entries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entries in client._transport._wrapped_methods + assert client._transport.create_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc request = {} - client.list_entries(request) + client.create_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_entries(request) + client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): +def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14980,32 +15422,32 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque ) # verify fields with default values are dropped + assert "entryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entries._get_unset_required_fields(jsonified_request) + ).create_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == request_init["entry_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryId"] = "entry_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entries._get_unset_required_fields(jsonified_request) + ).create_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("entry_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == "entry_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15014,7 +15456,7 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse() + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15026,48 +15468,55 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entries(request) + response = client.create_entry(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_entries_rest_unset_required_fields(): +def test_create_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entries._get_unset_required_fields({}) + unset_fields = transport.create_entry._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("entryId",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "parent", + "entryId", + "entry", ) ) - & set(("parent",)) ) -def test_list_entries_rest_flattened(): +def test_create_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15076,7 +15525,7 @@ def test_list_entries_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse() + return_value = catalog.Entry() # get arguments that satisfy an http rule for this method sample_request = { @@ -15086,6 +15535,8 @@ def test_list_entries_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry=catalog.Entry(name="name_value"), + entry_id="entry_id_value", ) mock_args.update(sample_request) @@ -15093,13 +15544,13 @@ def test_list_entries_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entries(**mock_args) + client.create_entry(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
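# A minimal usage sketch of the flattened create_entry call that the
# surrounding tests exercise; the parent path, entry type, and ids below are
# illustrative assumptions, not values taken from this patch.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient(transport="rest")
entry = client.create_entry(
    # parent identifies the entry group that will own the new entry.
    parent="projects/my-project/locations/us-central1/entryGroups/my-group",
    # Entry.entry_type is required; any existing EntryType resource name works here.
    entry=dataplex_v1.Entry(
        entry_type="projects/my-project/locations/global/entryTypes/my-type"
    ),
    # entry_id becomes the final path segment of the created entry's name,
    # matching the required "entryId" query parameter asserted in the tests above.
    entry_id="my-entry",
)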
@@ -15112,7 +15563,7 @@ def test_list_entries_rest_flattened(): ) -def test_list_entries_rest_flattened_error(transport: str = "rest"): +def test_create_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15121,116 +15572,54 @@ def test_list_entries_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_entries( - catalog.ListEntriesRequest(), + client.create_entry( + catalog.CreateEntryRequest(), parent="parent_value", + entry=catalog.Entry(name="name_value"), + entry_id="entry_id_value", ) -def test_list_entries_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token="abc", - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token="def", - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token="ghi", - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), +def test_update_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/entryGroups/sample3" - } - - pager = client.list_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.Entry) for i in results) - - pages = list(client.list_entries(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry in client._transport._wrapped_methods + 
assert client._transport.update_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc request = {} - client.get_entry(request) + client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_entry(request) + client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): +def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15241,29 +15630,26 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry._get_unset_required_fields(jsonified_request) + ).update_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry._get_unset_required_fields(jsonified_request) + ).update_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "aspect_types", - "paths", - "view", + "allow_missing", + "aspect_keys", + "delete_missing_aspects", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15284,9 +15670,10 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15300,32 +15687,33 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry(request) + response = client.update_entry(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_rest_unset_required_fields(): +def test_update_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_entry._get_unset_required_fields({}) + unset_fields = transport.update_entry._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "aspectTypes", - "paths", - "view", + "allowMissing", + "aspectKeys", + "deleteMissingAspects", + "updateMask", ) ) - & set(("name",)) + & set(("entry",)) ) -def test_get_entry_rest_flattened(): +def test_update_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15338,12 +15726,15 @@ def test_get_entry_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry=catalog.Entry(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -15357,20 +15748,20 @@ def test_get_entry_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry(**mock_args) + client.update_entry(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1], ) -def test_get_entry_rest_flattened_error(transport: str = "rest"): +def test_update_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15379,13 +15770,14 @@ def test_get_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_entry( - catalog.GetEntryRequest(), - name="name_value", + client.update_entry( + catalog.UpdateEntryRequest(), + entry=catalog.Entry(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_lookup_entry_rest_use_cached_wrapped_rpc(): +def test_delete_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15399,34 +15791,33 @@ def test_lookup_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.lookup_entry in client._transport._wrapped_methods + assert client._transport.delete_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc request = {} - client.lookup_entry(request) + client.delete_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.lookup_entry(request) + client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): +def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["name"] = "" - request_init["entry"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15434,39 +15825,24 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque ) # verify fields with default values are dropped - assert "entry" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lookup_entry._get_unset_required_fields(jsonified_request) + ).delete_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entry" in jsonified_request - assert jsonified_request["entry"] == request_init["entry"] jsonified_request["name"] = "name_value" - jsonified_request["entry"] = "entry_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lookup_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "aspect_types", - "entry", - "paths", - "view", - ) - ) + ).delete_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "entry" in jsonified_request - assert jsonified_request["entry"] == "entry_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15487,7 +15863,7 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -15503,44 +15879,83 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lookup_entry(request) + response = client.delete_entry(request) - expected_params = [ - ( - "entry", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lookup_entry_rest_unset_required_fields(): +def test_delete_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.lookup_entry._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "aspectTypes", - "entry", - "paths", - "view", - ) + unset_fields = transport.delete_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - & set( - ( - "name", - "entry", - ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + % client.transport._host, + args[1], ) + + +def test_delete_entry_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry( + catalog.DeleteEntryRequest(), + name="name_value", + ) + -def test_search_entries_rest_use_cached_wrapped_rpc(): +def test_list_entries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15554,34 +15969,33 @@ def test_search_entries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_entries in client._transport._wrapped_methods + assert client._transport.list_entries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc request = {} - client.search_entries(request) + client.list_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.search_entries(request) + client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): +def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" - request_init["query"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15589,41 +16003,32 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR ) # verify fields with default values are dropped - assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_entries._get_unset_required_fields(jsonified_request) + ).list_entries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "query" in jsonified_request - assert jsonified_request["query"] == request_init["query"] - jsonified_request["name"] = "name_value" - jsonified_request["query"] = "query_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_entries._get_unset_required_fields(jsonified_request) + ).list_entries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "order_by", + "filter", "page_size", "page_token", - "query", - "scope", - "semantic_search", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15632,7 +16037,7 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse() + return_value = catalog.ListEntriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15644,7 +16049,7 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -15653,53 +16058,39 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_entries(request) + response = client.list_entries(request) - expected_params = [ - ( - "query", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_search_entries_rest_unset_required_fields(): +def test_list_entries_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_entries._get_unset_required_fields({}) + unset_fields = transport.list_entries._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "orderBy", + "filter", "pageSize", "pageToken", - "query", - "scope", - "semanticSearch", - ) - ) - & set( - ( - "name", - "query", ) ) + & set(("parent",)) ) -def test_search_entries_rest_flattened(): +def test_list_entries_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15708,15 +16099,16 @@ def test_search_entries_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.SearchEntriesResponse() + return_value = catalog.ListEntriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", - query="query_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -15724,26 +16116,26 @@ def test_search_entries_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_entries(**mock_args) + client.list_entries(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*}:searchEntries" + "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1], ) -def test_search_entries_rest_flattened_error(transport: str = "rest"): +def test_list_entries_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15752,14 +16144,13 @@ def test_search_entries_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.search_entries( - catalog.SearchEntriesRequest(), - name="name_value", - query="query_value", + client.list_entries( + catalog.ListEntriesRequest(), + parent="parent_value", ) -def test_search_entries_rest_pager(transport: str = "rest"): +def test_list_entries_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15771,28 +16162,28 @@ def test_search_entries_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), ], next_page_token="abc", ), - catalog.SearchEntriesResponse( - results=[], + catalog.ListEntriesResponse( + entries=[], next_page_token="def", ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), ], next_page_token="ghi", ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), ], ), ) @@ -15800,27 +16191,29 @@ def test_search_entries_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) + response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"name": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + } - pager = client.search_entries(request=sample_request) + pager = client.list_entries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, catalog.SearchEntriesResult) for i in results) + assert all(isinstance(i, catalog.Entry) for i in results) - pages = list(client.search_entries(request=sample_request).pages) + pages = list(client.list_entries(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_create_metadata_job_rest_use_cached_wrapped_rpc(): +def test_get_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15834,43 +16227,33 @@ def test_create_metadata_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_metadata_job in client._transport._wrapped_methods - ) + assert client._transport.get_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_metadata_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc request = {} - client.create_metadata_job(request) + client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_metadata_job(request) + client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_metadata_job_rest_required_fields( - request_type=catalog.CreateMetadataJobRequest, -): +def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15881,28 +16264,29 @@ def test_create_metadata_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_metadata_job._get_unset_required_fields(jsonified_request) + ).get_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_metadata_job._get_unset_required_fields(jsonified_request) + ).get_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "metadata_job_id", - "validate_only", + "aspect_types", + "paths", + "view", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15911,7 +16295,7 @@ def test_create_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15923,50 +16307,48 @@ def test_create_metadata_job_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_metadata_job(request) + response = client.get_entry(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_metadata_job_rest_unset_required_fields(): +def test_get_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_metadata_job._get_unset_required_fields({}) + unset_fields = transport.get_entry._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "metadataJobId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "metadataJob", + "aspectTypes", + "paths", + "view", ) ) + & set(("name",)) ) -def test_create_metadata_job_rest_flattened(): +def test_get_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15975,41 +16357,43 @@ def test_create_metadata_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - metadata_job=catalog.MetadataJob(name="name_value"), - metadata_job_id="metadata_job_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_metadata_job(**mock_args) + client.get_entry(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1], ) -def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): +def test_get_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16018,15 +16402,13 @@ def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_metadata_job( - catalog.CreateMetadataJobRequest(), - parent="parent_value", - metadata_job=catalog.MetadataJob(name="name_value"), - metadata_job_id="metadata_job_id_value", + client.get_entry( + catalog.GetEntryRequest(), + name="name_value", ) -def test_get_metadata_job_rest_use_cached_wrapped_rpc(): +def test_lookup_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16040,37 +16422,34 @@ def test_get_metadata_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_metadata_job in client._transport._wrapped_methods + assert client._transport.lookup_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc request = {} - client.get_metadata_job(request) + client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_metadata_job(request) + client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_metadata_job_rest_required_fields( - request_type=catalog.GetMetadataJobRequest, -): +def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["name"] = "" + request_init["entry"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16078,24 +16457,39 @@ def test_get_metadata_job_rest_required_fields( ) # verify fields with default values are dropped + assert "entry" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata_job._get_unset_required_fields(jsonified_request) + ).lookup_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entry" in jsonified_request + assert jsonified_request["entry"] == request_init["entry"] jsonified_request["name"] = "name_value" + jsonified_request["entry"] = "entry_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata_job._get_unset_required_fields(jsonified_request) + ).lookup_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "aspect_types", + "entry", + "paths", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "entry" in jsonified_request + assert jsonified_request["entry"] == "entry_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16104,7 +16498,7 @@ def test_get_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16125,90 +16519,51 @@ def test_get_metadata_job_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_metadata_job(request) + response = client.lookup_entry(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entry", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_metadata_job_rest_unset_required_fields(): +def test_lookup_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.lookup_entry._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "aspectTypes", + "entry", + "paths", + "view", + ) + ) + & set( + ( + "name", + "entry", + ) + ) + ) -def test_get_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/metadataJobs/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_metadata_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/metadataJobs/*}" - % client.transport._host, - args[1], - ) - - -def test_get_metadata_job_rest_flattened_error(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_metadata_job( - catalog.GetMetadataJobRequest(), - name="name_value", - ) - - -def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): +def test_search_entries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16222,39 +16577,34 @@ def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_metadata_jobs in client._transport._wrapped_methods - ) + assert client._transport.search_entries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc request = {} - client.list_metadata_jobs(request) + client.search_entries(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_metadata_jobs(request) + client.search_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_metadata_jobs_rest_required_fields( - request_type=catalog.ListMetadataJobsRequest, -): +def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["query"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16262,33 +16612,41 @@ def test_list_metadata_jobs_rest_required_fields( ) # verify fields with default values are dropped + assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + ).search_entries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + ).search_entries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", "order_by", "page_size", "page_token", + "query", + "scope", + "semantic_search", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16297,7 +16655,7 @@ def test_list_metadata_jobs_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse() + return_value = catalog.SearchEntriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16309,7 +16667,7 @@ def test_list_metadata_jobs_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -16318,40 +16676,53 @@ def test_list_metadata_jobs_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_metadata_jobs(request) + response = client.search_entries(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_metadata_jobs_rest_unset_required_fields(): +def test_search_entries_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) + unset_fields = transport.search_entries._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", "orderBy", "pageSize", "pageToken", + "query", + "scope", + "semanticSearch", + ) + ) + & set( + ( + "name", + "query", ) ) - & set(("parent",)) ) -def test_list_metadata_jobs_rest_flattened(): +def test_search_entries_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16360,14 +16731,15 @@ def test_list_metadata_jobs_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
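# (Aside, not part of the patch:) every `_rest_required_fields` test builds
# its fake HTTP exchange the same way: patch requests.Session.request, patch
# path_template.transcode so default-valued required fields survive into
# query_params, then hand back a 200 whose body is the protobuf rendered via
# json_format.MessageToJson. A condensed, self-contained sketch of that
# response fabrication (the helper name is hypothetical):
from google.protobuf import json_format
from requests import Response

def fake_rest_response(pb_message):
    # pb_message must be a raw protobuf; proto-plus values are converted
    # first, e.g. catalog.SearchEntriesResponse.pb(return_value).
    response = Response()
    response.status_code = 200
    response._content = json_format.MessageToJson(pb_message).encode("UTF-8")
    response.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response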
- return_value = catalog.ListMetadataJobsResponse() + return_value = catalog.SearchEntriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", + query="query_value", ) mock_args.update(sample_request) @@ -16375,26 +16747,26 @@ def test_list_metadata_jobs_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_metadata_jobs(**mock_args) + client.search_entries(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + "%s/v1/{name=projects/*/locations/*}:searchEntries" % client.transport._host, args[1], ) -def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): +def test_search_entries_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16403,13 +16775,14 @@ def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
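# (Aside, not part of the patch:) the pager test below stacks four pages —
# 3, 0, 1 and 2 results — behind req.side_effect, and doubles the tuple so
# both the item iteration and the .pages iteration consume a full sequence.
# The expected totals reduce to plain Python:
pages = [
    {"results": [1, 2, 3], "next_page_token": "abc"},
    {"results": [], "next_page_token": "def"},
    {"results": [4], "next_page_token": "ghi"},
    {"results": [5, 6], "next_page_token": ""},
]
assert sum(len(page["results"]) for page in pages) == 6
assert [page["next_page_token"] for page in pages] == ["abc", "def", "ghi", ""]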
with pytest.raises(ValueError): - client.list_metadata_jobs( - catalog.ListMetadataJobsRequest(), - parent="parent_value", + client.search_entries( + catalog.SearchEntriesRequest(), + name="name_value", + query="query_value", ) -def test_list_metadata_jobs_rest_pager(transport: str = "rest"): +def test_search_entries_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16421,28 +16794,28 @@ def test_list_metadata_jobs_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), ], next_page_token="abc", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], + catalog.SearchEntriesResponse( + results=[], next_page_token="def", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), ], next_page_token="ghi", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), ], ), ) @@ -16450,27 +16823,27 @@ def test_list_metadata_jobs_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) + response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2"} - pager = client.list_metadata_jobs(request=sample_request) + pager = client.search_entries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, catalog.MetadataJob) for i in results) + assert all(isinstance(i, catalog.SearchEntriesResult) for i in results) - pages = list(client.list_metadata_jobs(request=sample_request).pages) + pages = list(client.search_entries(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): +def test_create_metadata_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16485,7 +16858,7 @@ def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.cancel_metadata_job in client._transport._wrapped_methods + client._transport.create_metadata_job in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16494,29 +16867,33 @@ def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.cancel_metadata_job + client._transport.create_metadata_job ] = mock_rpc request = {} - client.cancel_metadata_job(request) + client.create_metadata_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.cancel_metadata_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_cancel_metadata_job_rest_required_fields( - request_type=catalog.CancelMetadataJobRequest, +def test_create_metadata_job_rest_required_fields( + request_type=catalog.CreateMetadataJobRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16527,21 +16904,28 @@ def test_cancel_metadata_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + ).create_metadata_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + ).create_metadata_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "metadata_job_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16550,7 +16934,7 @@ def test_cancel_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
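# (Aside, not part of the patch:) create_metadata_job is a long-running
# operation, so the canned response above is an operations_pb2.Operation
# rather than a resource message, and its cached-wrapper test resets
# wrapper_fn after the first call because operation methods lazily build one
# extra wrapper when first invoked. Building and serializing that canned
# body looks like this:
from google.longrunning import operations_pb2
from google.protobuf import json_format
op = operations_pb2.Operation(name="operations/spam")
assert '"operations/spam"' in json_format.MessageToJson(op)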
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16570,72 +16954,85 @@ def test_cancel_metadata_job_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_metadata_job(request) + response = client.create_metadata_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_cancel_metadata_job_rest_unset_required_fields(): +def test_create_metadata_job_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_cancel_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/metadataJobs/sample3" - } + unset_fields = transport.create_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "metadataJobId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "metadataJob", + ) + ) + ) + + +def test_create_metadata_job_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_metadata_job(**mock_args) + client.create_metadata_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" + "%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1], ) -def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): +def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16644,1461 +17041,3608 @@ def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.cancel_metadata_job( - catalog.CancelMetadataJobRequest(), - name="name_value", + client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_get_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - transport=transport, - ) + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.get_metadata_job + ] = mock_rpc - # It is an error to provide scopes and a transport instance. 
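# (Aside, not part of the patch:) the test_credentials_transport_error block
# removed at this spot enforced mutual exclusivity: a prebuilt transport may
# not be combined with credentials, a credentials file, scopes, or an API
# key, and an API key may not be combined with credentials. The guard it
# exercised reduces to roughly this shape (a simplified sketch, not the
# library's actual implementation):
def validate_client_args(transport=None, credentials=None,
                         credentials_file=None, scopes=None, api_key=None):
    if transport is not None and any([credentials, credentials_file, scopes, api_key]):
        raise ValueError("transport is mutually exclusive with credential options")
    if api_key and credentials:
        raise ValueError("api_key and credentials are mutually exclusive")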
- transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.get_metadata_job(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CatalogServiceClient(transport=transport) - assert client.transport is transport + client.get_metadata_job(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.CatalogServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_metadata_job_rest_required_fields( + request_type=catalog.GetMetadataJobRequest, +): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - channel = transport.grpc_channel - assert channel + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - transports.CatalogServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_transport_kind_grpc(): - transport = CatalogServiceClient.get_transport_class("grpc")( + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_initialize_client_w_grpc(): client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert client is not None + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_entry_type(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() + response = client.get_metadata_job(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_get_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_entry_type(request=None) + unset_fields = transport.get_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_type_empty_call_grpc(): +def test_get_metadata_job_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/metadataJobs/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: - call.return_value = catalog.ListEntryTypesResponse() - client.list_entry_types(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() + client.get_metadata_job(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/metadataJobs/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_type_empty_call_grpc(): +def test_get_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: - call.return_value = catalog.EntryType() - client.get_entry_type(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - assert args[0] == request_msg +def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.list_metadata_jobs in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_aspect_type(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_metadata_jobs + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() + request = {} + client.list_metadata_jobs(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_metadata_jobs(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_aspect_type(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() +def test_list_metadata_jobs_rest_required_fields( + request_type=catalog.ListMetadataJobsRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_aspect_type(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_aspect_types_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), "__call__" - ) as call: - call.return_value = catalog.ListAspectTypesResponse() - client.list_aspect_types(request=None) + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_get_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_metadata_jobs(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: - call.return_value = catalog.AspectType() - client.get_aspect_type(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - assert args[0] == request_msg +def test_list_metadata_jobs_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_group_empty_call_grpc(): +def test_list_metadata_jobs_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. 
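# (Aside, not part of the patch:) all of the *_empty_call_grpc failsafes
# assert one invariant — calling a surface method with request=None must
# hand the transport stub a default-constructed request message. The shared
# shape, as a hypothetical helper (names are illustrative):
from unittest import mock

def assert_empty_call(client, rpc_name, request_type, fake_response):
    # Patch the transport-level callable, invoke with request=None, and
    # verify the stub received request_type() built from no arguments.
    with mock.patch.object(type(getattr(client.transport, rpc_name)), "__call__") as call:
        call.return_value = fake_response
        getattr(client, rpc_name)(request=None)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_type()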
- with mock.patch.object( - type(client.transport.update_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_entry_group(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() + client.list_metadata_jobs(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_group_empty_call_grpc(): +def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_metadata_jobs( + catalog.ListMetadataJobsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_groups_empty_call_grpc(): +def test_list_metadata_jobs_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), "__call__" - ) as call: - call.return_value = catalog.ListEntryGroupsResponse() - client.list_entry_groups(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + ) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() + # Wrap the values into proper Response objs + response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_metadata_jobs(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: - call.return_value = catalog.EntryGroup() - client.get_entry_group(request=None) + pages = list(client.list_metadata_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - assert args[0] == request_msg +def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.cancel_metadata_job in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. 
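# (Aside, not part of the patch:) the _use_cached_wrapped_rpc tests pin down
# a memoization contract: _prep_wrapped_messages wraps every RPC once at
# client construction, and later calls look the wrapper up in
# _transport._wrapped_methods instead of re-wrapping. A toy equivalent of
# that contract:
wrap_count = {"n": 0}
def wrap_method(fn):
    wrap_count["n"] += 1
    return fn
_wrapped_methods = {"cancel_metadata_job": wrap_method(lambda request: None)}
assert wrap_count["n"] == 1
_wrapped_methods["cancel_metadata_job"]({})  # first call: cache hit
_wrapped_methods["cancel_metadata_job"]({})  # second call: still no new wrapper
assert wrap_count["n"] == 1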
- with mock.patch.object(type(client.transport.create_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.create_entry(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_metadata_job + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() + request = {} + client.cancel_metadata_job(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.cancel_metadata_job(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.update_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() +def test_cancel_metadata_job_rest_required_fields( + request_type=catalog.CancelMetadataJobRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.delete_entry(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_entries_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entries), "__call__") as call: - call.return_value = catalog.ListEntriesResponse() - client.list_entries(request=None) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_metadata_job(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.get_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() +def test_cancel_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lookup_entry_empty_call_grpc(): +def test_cancel_metadata_job_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/metadataJobs/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_entries), "__call__") as call: - call.return_value = catalog.SearchEntriesResponse() - client.search_entries(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() + client.cancel_metadata_job(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_metadata_job_empty_call_grpc(): +def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_metadata_job(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_metadata_job( + catalog.CancelMetadataJobRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - assert args[0] == request_msg +def test_create_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.create_entry_link in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: - call.return_value = catalog.MetadataJob() - client.get_metadata_job(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entry_link + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() + request = {} + client.create_entry_link(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_entry_link(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_metadata_jobs_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), "__call__" - ) as call: - call.return_value = catalog.ListMetadataJobsResponse() - client.list_metadata_jobs(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() +def test_create_entry_link_rest_required_fields( + request_type=catalog.CreateEntryLinkRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request_init["entry_link_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped + assert "entryLinkId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == request_init["entry_link_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["entryLinkId"] = "entry_link_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entry_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("entry_link_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == "entry_link_id_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_metadata_job_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), "__call__" - ) as call: - call.return_value = None - client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result

-    assert args[0] == request_msg
+            response_value = Response()
+            response_value.status_code = 200

+            # Convert return value to protobuf type
+            return_value = catalog.EntryLink.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)

-def test_transport_kind_grpc_asyncio():
-    transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")(
-        credentials=async_anonymous_credentials()
-    )
-    assert transport.kind == "grpc_asyncio"
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

+            response = client.create_entry_link(request)

-def test_initialize_client_w_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport="grpc_asyncio"
-    )
-    assert client is not None
+            expected_params = [
+                (
+                    "entryLinkId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_create_entry_type_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+def test_create_entry_link_rest_unset_required_fields():
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
     )

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_entry_type), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
+    unset_fields = transport.create_entry_link._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("entryLinkId",))
+        & set(
+            (
+                "parent",
+                "entryLinkId",
+                "entryLink",
+            )
        )
-        await client.create_entry_type(request=None)
+    )

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.CreateEntryTypeRequest()

-    assert args[0] == request_msg
+def test_create_entry_link_rest_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.EntryLink()

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_update_entry_type_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/entryGroups/sample3"
+        }

-    # Mock the actual call, and fake the request.
- with mock.patch.object( - type(client.transport.update_entry_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) - await client.update_entry_type(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.create_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry_link( + catalog.CreateEntryLinkRequest(), + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) - await client.delete_entry_type(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - assert args[0] == request_msg +def test_delete_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio
-async def test_list_entry_types_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+        # Ensure method has been cached
+        assert client._transport.delete_entry_link in client._transport._wrapped_methods

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            catalog.ListEntryTypesResponse(
-                next_page_token="next_page_token_value",
-                unreachable_locations=["unreachable_locations_value"],
-            )
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
-        await client.list_entry_types(request=None)
-
-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.ListEntryTypesRequest()
+        client._transport._wrapped_methods[
+            client._transport.delete_entry_link
+        ] = mock_rpc

-    assert args[0] == request_msg
+        request = {}
+        client.delete_entry_link(request)

+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_entry_type_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+        client.delete_entry_link(request)

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            catalog.EntryType(
-                name="name_value",
-                uid="uid_value",
-                description="description_value",
-                display_name="display_name_value",
-                etag="etag_value",
-                type_aliases=["type_aliases_value"],
-                platform="platform_value",
-                system="system_value",
-            )
-        )
-        await client.get_entry_type(request=None)
-
-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.GetEntryTypeRequest()
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2

-    assert args[0] == request_msg

+def test_delete_entry_link_rest_required_fields(
+    request_type=catalog.DeleteEntryLinkRequest,
+):
+    transport_class = transports.CatalogServiceRestTransport

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_create_aspect_type_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
     )

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_aspect_type), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_aspect_type(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_aspect_type(request=None) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. 
-    with mock.patch.object(
-        type(client.transport.delete_aspect_type), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
-        )
-        await client.delete_aspect_type(request=None)
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.DeleteAspectTypeRequest()
+            response = client.delete_entry_link(request)

-    assert args[0] == request_msg
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_aspect_types_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+def test_delete_entry_link_rest_unset_required_fields():
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
     )

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_aspect_types), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            catalog.ListAspectTypesResponse(
-                next_page_token="next_page_token_value",
-                unreachable_locations=["unreachable_locations_value"],
-            )
-        )
-        await client.list_aspect_types(request=None)
+    unset_fields = transport.delete_entry_link._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.ListAspectTypesRequest()

-    assert args[0] == request_msg
+def test_delete_entry_link_rest_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.EntryLink()

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_aspect_type_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4"
+        }

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.AspectType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.get_aspect_type(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.delete_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) - await client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
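+
+# NOTE: The `*_flattened` / `*_flattened_error` tests above exercise the GAPIC
+# convention that a method accepts either a `request` object or the individual
+# "flattened" fields, but never both. A minimal sketch of the guard
+# (illustrative only, not the generated implementation):
+#
+#     has_flattened_params = any([name])
+#     if request is not None and has_flattened_params:
+#         raise ValueError(
+#             "If the `request` argument is set, then none of "
+#             "the individual field arguments should be set."
+#         )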
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
+def test_get_entry_link_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = CatalogServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
        )
-        await client.update_entry_group(request=None)

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.UpdateEntryGroupRequest()
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()

-    assert args[0] == request_msg
+        # Ensure method has been cached
+        assert client._transport.get_entry_link in client._transport._wrapped_methods

+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_delete_entry_group_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+        request = {}
+        client.get_entry_link(request)

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_entry_group), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name="operations/spam")
-        )
-        await client.delete_entry_group(request=None)
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.DeleteEntryGroupRequest()
+        client.get_entry_link(request)

-    assert args[0] == request_msg
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_entry_groups_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+def test_get_entry_link_rest_required_fields(request_type=catalog.GetEntryLinkRequest):
+    transport_class = transports.CatalogServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
    )

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_entry_groups), "__call__"
-    ) as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListEntryGroupsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) - ) - await client.list_entry_groups(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.EntryGroup( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - ) - await client.get_entry_group(request=None) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio
-async def test_create_entry_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
+            # Convert return value to protobuf type
+            return_value = catalog.EntryLink.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            catalog.Entry(
-                name="name_value",
-                entry_type="entry_type_value",
-                parent_entry="parent_entry_value",
-                fully_qualified_name="fully_qualified_name_value",
-            )
-        )
-        await client.create_entry(request=None)
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.CreateEntryRequest()
+            response = client.get_entry_link(request)

-    assert args[0] == request_msg
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_update_entry_empty_call_grpc_asyncio():
-    client = CatalogServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
+def test_get_entry_link_rest_unset_required_fields():
+    transport = transports.CatalogServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
    )

-    # Mock the actual call, and fake the request.
-    with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            catalog.Entry(
-                name="name_value",
-                entry_type="entry_type_value",
-                parent_entry="parent_entry_value",
-                fully_qualified_name="fully_qualified_name_value",
-            )
-        )
-        await client.update_entry(request=None)
+    unset_fields = transport.get_entry_link._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))

-    # Establish that the underlying stub method was called.
-    call.assert_called()
-    _, args, _ = call.mock_calls[0]
-    request_msg = catalog.UpdateEntryRequest()

-    assert args[0] == request_msg
+def test_get_entry_link_rest_flattened():
+    client = CatalogServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = catalog.EntryLink()

-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio -async def test_delete_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.delete_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_link(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entries), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListEntriesResponse( - next_page_token="next_page_token_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_link( + catalog.GetEntryLinkRequest(), + name="name_value", ) - await client.list_entries(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - assert args[0] == request_msg +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # It is an error to provide a credentials file and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # It is an error to provide an api_key and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, + transport=transport, + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - await client.get_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() + # It is an error to provide scopes and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - assert args[0] == request_msg +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CatalogServiceClient(transport=transport) + assert client.transport is transport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_lookup_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + channel = transport.grpc_channel + assert channel - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) - ) - await client.lookup_entry(request=None) + transport = transports.CatalogServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - assert args[0] == request_msg +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + transports.CatalogServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = CatalogServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_entries), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.SearchEntriesResponse( - total_size=1086, - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.search_entries(request=None) + with mock.patch.object( + type(client.transport.create_entry_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() + request_msg = catalog.CreateEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.create_metadata_job), "__call__" + type(client.transport.update_entry_type), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_metadata_job(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() + request_msg = catalog.UpdateEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.MetadataJob( - name="name_value", - uid="uid_value", - type_=catalog.MetadataJob.Type.IMPORT, - ) - ) - await client.get_metadata_job(request=None) + with mock.patch.object( + type(client.transport.delete_entry_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() + request_msg = catalog.DeleteEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_metadata_jobs_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_entry_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + call.return_value = catalog.ListEntryTypesResponse() + client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + call.return_value = catalog.EntryType() + client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_metadata_jobs), "__call__" + type(client.transport.create_aspect_type), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListMetadataJobsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) - ) - await client.list_metadata_jobs(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_aspect_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() + request_msg = catalog.CreateAspectTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.cancel_metadata_job), "__call__" + type(client.transport.update_aspect_type), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_metadata_job(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_aspect_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_aspect_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), "__call__" + ) as call: + call.return_value = catalog.ListAspectTypesResponse() + client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() assert args[0] == request_msg -def test_transport_kind_rest(): - transport = CatalogServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + call.return_value = catalog.AspectType() + client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_groups_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), "__call__" + ) as call: + call.return_value = catalog.ListEntryGroupsResponse() + client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + call.return_value = catalog.EntryGroup() + client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + call.return_value = catalog.SearchEntriesResponse() + client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. 
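+    # `mock_calls` entries unpack as (name, args, kwargs) triples, so the
+    # unpacking below keeps only the positional arguments; args[0] is the
+    # request message the mocked stub actually received, which should equal a
+    # default-constructed request because `request=None` was passed.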
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = catalog.EntryLink() + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
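+        # The async stub must hand back an awaitable, so the fake response is
+        # wrapped in `grpc_helpers_async.FakeUnaryUnaryCall`; awaiting the
+        # call then resolves to the wrapped message (here an Operation proto).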
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntryTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + type_aliases=["type_aliases_value"], + platform="platform_value", + system="system_value", + ) + ) + await client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
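+# (`@pytest.mark.asyncio` hands a coroutine test to the pytest-asyncio
+# plugin, which runs it on an event loop; without the marker the coroutine
+# would never actually be awaited.)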
+@pytest.mark.asyncio +async def test_create_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_aspect_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListAspectTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.AspectType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + ) + await client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
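+    # `__call__` is patched on type(...) because Python looks up dunder
+    # methods on an object's type rather than on the instance, so patching
+    # the multicallable's class is what actually intercepts the invocation.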
+ with mock.patch.object( + type(client.transport.delete_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_groups_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntryGroupsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryGroup( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + ) + await client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.create_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lookup_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.SearchEntriesResponse( + total_size=1086, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
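+# (In the fake responses below, the field is spelled `type_`: proto-plus
+# appends a trailing underscore to field names that would shadow Python
+# keywords or builtins, while the wire name remains `type`.)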
+@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CatalogServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_entry_type_rest_bad_request( + request_type=catalog.CreateEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
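+    # A 400 from the mocked HTTP layer is translated by google.api_core's
+    # exception mapping into `core_exceptions.BadRequest`, and the
+    # surrounding `pytest.raises` asserts that it reaches the caller.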
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryTypeRequest, + dict, + ], +) +def test_create_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["entry_type"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "type_aliases": ["type_aliases_value1", "type_aliases_value2"], + "platform": "platform_value", + "system": "system_value", + "required_aspects": [{"type_": "type__value"}], + "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
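+        # Two message runtimes are possible here: proto-plus classes expose
+        # their fields via `.meta.fields`, while raw protobuf classes carry a
+        # `DESCRIPTOR`; the absence of `DESCRIPTOR` is what identifies the
+        # proto-plus case below.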
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry_type(request) + + # Establish that the response is the type that we expect. 
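+    # (For Operation-returning methods the generated success test stops at
+    # round-tripping the JSON payload; no further assertion follows here.)
+    #
+    # The interceptor test below builds a CatalogServiceRestTransport with an
+    # optional CatalogServiceRestInterceptor and verifies hook ordering:
+    # `pre_*` may rewrite the request and metadata before the HTTP call,
+    # `post_*` receives the decoded response, `post_*_with_metadata`
+    # additionally receives the response metadata, and each hook is asserted
+    # to have run exactly once.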
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_create_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.CreateEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_entry_type_rest_bad_request( + request_type=catalog.UpdateEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.UpdateEntryTypeRequest, + dict, + ], +) +def test_update_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + } + request_init["entry_type"] = { + "name": "projects/sample1/locations/sample2/entryTypes/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "type_aliases": ["type_aliases_value1", "type_aliases_value2"], + "platform": "platform_value", + "system": "system_value", + "required_aspects": [{"type_": "type__value"}], + "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entry_type(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_update_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_update_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.UpdateEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_type_rest_bad_request( + request_type=catalog.DeleteEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.DeleteEntryTypeRequest, + dict, + ], +) +def test_delete_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
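+    # The success-path tests patch `request` on the transport session's own
+    # class (`type(client.transport._session)`), so URL transcoding, request
+    # serialization, and response deserialization all still run; only the
+    # HTTP exchange itself is faked.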
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry_type(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_delete_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.DeleteEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entry_types(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.ListEntryTypesRequest, + dict, + ], +) +def test_list_entry_types_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entry_types(request) + + # Establish that the response is the type that we expect. 
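+    # List methods return a pager rather than the raw response message: the
+    # pager surfaces the first page's fields directly (asserted below) and,
+    # when iterated, would lazily fetch further pages using `next_page_token`.
+    # Illustrative only:
+    #
+    #   for entry_type in response:  # each iteration may trigger a page fetch
+    #       ...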
+ assert isinstance(response, pagers.ListEntryTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entry_types_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_list_entry_types" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_list_entry_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListEntryTypesResponse.to_json( + catalog.ListEntryTypesResponse() + ) + req.return_value.content = return_value + + request = catalog.ListEntryTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListEntryTypesResponse() + post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata + + client.list_entry_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetEntryTypeRequest, + dict, + ], +) +def test_get_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + type_aliases=["type_aliases_value"], + platform="platform_value", + system="system_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_entry_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, catalog.EntryType) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.type_aliases == ["type_aliases_value"] + assert response.platform == "platform_value" + assert response.system == "system_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_get_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryType.to_json(catalog.EntryType()) + req.return_value.content = return_value + + request = catalog.GetEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.EntryType() + post_with_metadata.return_value = catalog.EntryType(), metadata + + client.get_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_create_entry_type_rest_bad_request( - request_type=catalog.CreateEntryTypeRequest, +def test_create_aspect_type_rest_bad_request( + request_type=catalog.CreateAspectTypeRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -18119,24 +20663,24 @@ def test_create_entry_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_type(request) + client.create_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateEntryTypeRequest, + catalog.CreateAspectTypeRequest, dict, ], ) -def test_create_entry_type_rest_call_success(request_type): +def test_create_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["entry_type"] = { + request_init["aspect_type"] = { "name": "name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -18145,18 +20689,37 @@ def 
test_create_entry_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "type_aliases": ["type_aliases_value1", "type_aliases_value2"], - "platform": "platform_value", - "system": "system_value", - "required_aspects": [{"type_": "type__value"}], "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + "metadata_template": { + "index": 536, + "name": "name_value", + "type_": "type__value", + "record_fields": {}, + "enum_values": [ + {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} + ], + "map_items": {}, + "array_items": {}, + "type_id": "type_id_value", + "type_ref": "type_ref_value", + "constraints": {"required": True}, + "annotations": { + "deprecated": "deprecated_value", + "display_name": "display_name_value", + "description": "description_value", + "display_order": 1393, + "string_type": "string_type_value", + "string_values": ["string_values_value1", "string_values_value2"], + }, + }, + "transfer_status": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] + test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18184,7 +20747,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18214,10 +20777,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] else: - del request_init["entry_type"][field][subfield] + del request_init["aspect_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18232,14 +20795,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_type(request) + response = client.create_aspect_type(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_type_rest_interceptors(null_interceptor): +def test_create_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18255,16 +20818,19 @@ def test_create_entry_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_type" + transports.CatalogServiceRestInterceptor, "post_create_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_create_aspect_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry_type" + transports.CatalogServiceRestInterceptor, "pre_create_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) + pb_message = catalog.CreateAspectTypeRequest.pb( + catalog.CreateAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18278,7 +20844,7 @@ def test_create_entry_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.CreateEntryTypeRequest() + request = catalog.CreateAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18287,7 +20853,7 @@ def test_create_entry_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_entry_type( + client.create_aspect_type( request, metadata=[ ("key", "val"), @@ -18300,15 +20866,17 @@ def test_create_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_type_rest_bad_request( - request_type=catalog.UpdateEntryTypeRequest, +def test_update_aspect_type_rest_bad_request( + request_type=catalog.UpdateAspectTypeRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + "aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } request = request_type(**request_init) @@ -18324,27 +20892,29 @@ def test_update_entry_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_type(request) + client.update_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateEntryTypeRequest, + catalog.UpdateAspectTypeRequest, dict, ], ) -def test_update_entry_type_rest_call_success(request_type): +def test_update_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + 
"aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } - request_init["entry_type"] = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3", + request_init["aspect_type"] = { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -18352,18 +20922,37 @@ def test_update_entry_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "type_aliases": ["type_aliases_value1", "type_aliases_value2"], - "platform": "platform_value", - "system": "system_value", - "required_aspects": [{"type_": "type__value"}], "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + "metadata_template": { + "index": 536, + "name": "name_value", + "type_": "type__value", + "record_fields": {}, + "enum_values": [ + {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} + ], + "map_items": {}, + "array_items": {}, + "type_id": "type_id_value", + "type_ref": "type_ref_value", + "constraints": {"required": True}, + "annotations": { + "deprecated": "deprecated_value", + "display_name": "display_name_value", + "description": "description_value", + "display_order": 1393, + "string_type": "string_type_value", + "string_values": ["string_values_value1", "string_values_value2"], + }, + }, + "transfer_status": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] + test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18391,7 +20980,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18421,131 +21010,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] else: - del request_init["entry_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_type(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_type" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry_type" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.UpdateEntryTypeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_entry_type( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_type_rest_bad_request( - request_type=catalog.DeleteEntryTypeRequest, -): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_type(request) - - -@pytest.mark.parametrize( - "request_type", - [ - catalog.DeleteEntryTypeRequest, - dict, - ], -) -def test_delete_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + del request_init["aspect_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18560,14 +21028,14 @@ def test_delete_entry_type_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_type(request) + response = client.update_aspect_type(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_type_rest_interceptors(null_interceptor): +def test_update_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18583,16 +21051,19 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_type" + transports.CatalogServiceRestInterceptor, "post_update_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_update_aspect_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry_type" + transports.CatalogServiceRestInterceptor, "pre_update_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) + pb_message = catalog.UpdateAspectTypeRequest.pb( + catalog.UpdateAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18606,7 +21077,7 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.DeleteEntryTypeRequest() + request = catalog.UpdateAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18615,7 +21086,7 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_entry_type( + client.update_aspect_type( request, metadata=[ ("key", "val"), @@ -18628,12 +21099,14 @@ def 
test_delete_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): +def test_delete_aspect_type_rest_bad_request( + request_type=catalog.DeleteAspectTypeRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18648,53 +21121,45 @@ def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_types(request) + client.delete_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListEntryTypesRequest, + catalog.DeleteAspectTypeRequest, dict, ], ) -def test_list_entry_types_rest_call_success(request_type): +def test_delete_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_types(request) + response = client.delete_aspect_type(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEntryTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_types_rest_interceptors(null_interceptor): +def test_delete_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18708,16 +21173,21 @@ def test_list_entry_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_delete_aspect_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_entry_types" + transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) + pb_message = catalog.DeleteAspectTypeRequest.pb( + catalog.DeleteAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18728,21 +21198,19 @@ def test_list_entry_types_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntryTypesResponse.to_json( - catalog.ListEntryTypesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.ListEntryTypesRequest() + request = catalog.DeleteAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntryTypesResponse() - post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_entry_types( + client.delete_aspect_type( request, metadata=[ ("key", "val"), @@ -18755,12 +21223,14 @@ def test_list_entry_types_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): +def test_list_aspect_types_rest_bad_request( + request_type=catalog.ListAspectTypesRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
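Note on the Operation-returning tests in the hunks above: for LRO methods such as delete_aspect_type, the generated success test stops at the HTTP round trip, so the line under "# Establish that the response is the type that we expect." only re-serializes the mocked Operation rather than asserting on a typed message. A minimal sketch of the wire payload these tests fake, using the same operations_pb2 and json_format modules the test module already imports:

    # Sketch: the JSON body a mocked LRO response carries in these tests.
    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.Operation(name="operations/spam")
    body = json_format.MessageToJson(op)
    # The REST transport parses this back into an Operation and wraps it in a
    # polling future; a real caller would then call .result() on that future.
    parsed = json_format.Parse(body, operations_pb2.Operation())
    assert parsed.name == "operations/spam"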
@@ -18775,37 +21245,31 @@ def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_type(request) + client.list_aspect_types(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryTypeRequest, + catalog.ListAspectTypesRequest, dict, ], ) -def test_get_entry_type_rest_call_success(request_type): +def test_list_aspect_types_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - type_aliases=["type_aliases_value"], - platform="platform_value", - system="system_value", + return_value = catalog.ListAspectTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -18813,27 +21277,21 @@ def test_get_entry_type_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_type(request) + response = client.list_aspect_types(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.EntryType) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.type_aliases == ["type_aliases_value"] - assert response.platform == "platform_value" - assert response.system == "system_value" + assert isinstance(response, pagers.ListAspectTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_type_rest_interceptors(null_interceptor): +def test_list_aspect_types_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18847,16 +21305,16 @@ def test_get_entry_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_type" + transports.CatalogServiceRestInterceptor, "post_list_aspect_types" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry_type" + transports.CatalogServiceRestInterceptor, "pre_list_aspect_types" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest()) + pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18867,19 +21325,21 @@ def test_get_entry_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryType.to_json(catalog.EntryType()) + return_value = catalog.ListAspectTypesResponse.to_json( + catalog.ListAspectTypesResponse() + ) req.return_value.content = return_value - request = catalog.GetEntryTypeRequest() + request = catalog.ListAspectTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.EntryType() - post_with_metadata.return_value = catalog.EntryType(), metadata + post.return_value = catalog.ListAspectTypesResponse() + post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata - client.get_entry_type( + client.list_aspect_types( request, metadata=[ ("key", "val"), @@ -18892,14 +21352,12 @@ def test_get_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_aspect_type_rest_bad_request( - request_type=catalog.CreateAspectTypeRequest, -): +def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within 
the method and fake a BadRequest error. @@ -18914,146 +21372,61 @@ def test_create_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_aspect_type(request) + client.get_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateAspectTypeRequest, + catalog.GetAspectTypeRequest, dict, ], ) -def test_create_aspect_type_rest_call_success(request_type): +def test_get_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["aspect_type"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, - "metadata_template": { - "index": 536, - "name": "name_value", - "type_": "type__value", - "record_fields": {}, - "enum_values": [ - {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} - ], - "map_items": {}, - "array_items": {}, - "type_id": "type_id_value", - "type_ref": "type_ref_value", - "constraints": {"required": True}, - "annotations": { - "deprecated": "deprecated_value", - "display_name": "display_name_value", - "description": "description_value", - "display_order": 1393, - "string_type": "string_type_value", - "string_values": ["string_values_value1", "string_values_value2"], - }, - }, - "transfer_status": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] - else: - del request_init["aspect_type"][field][subfield] + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_aspect_type(request) + response = client.get_aspect_type(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.AspectType) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_aspect_type_rest_interceptors(null_interceptor): +def test_get_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19067,21 +21440,16 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_aspect_type" + transports.CatalogServiceRestInterceptor, "post_get_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_aspect_type_with_metadata", + transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_get_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateAspectTypeRequest.pb( - catalog.CreateAspectTypeRequest() - ) + pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19092,19 +21460,19 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = catalog.AspectType.to_json(catalog.AspectType()) req.return_value.content = return_value - request = catalog.CreateAspectTypeRequest() + request = catalog.GetAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.AspectType() + post_with_metadata.return_value = catalog.AspectType(), metadata - client.create_aspect_type( + client.get_aspect_type( request, metadata=[ ("key", "val"), @@ -19117,18 +21485,14 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_aspect_type_rest_bad_request( - request_type=catalog.UpdateAspectTypeRequest, +def test_create_entry_group_rest_bad_request( + request_type=catalog.CreateEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
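Every *_rest_bad_request test above follows one pattern: patch requests.Session.request to return a mock whose status_code is 400, then expect the REST transport to surface it as core_exceptions.BadRequest. A standalone sketch of that mapping, assuming the api_core helper from_http_response is what performs the status-to-exception translation:

    # Sketch: HTTP 400 -> BadRequest, as exercised by the bad_request tests.
    from unittest import mock
    from google.api_core import exceptions as core_exceptions

    response = mock.Mock()
    response.status_code = 400
    response.json = mock.Mock(return_value={})  # empty error payload
    response.request = mock.Mock()              # method/url used in the message

    exc = core_exceptions.from_http_response(response)
    assert isinstance(exc, core_exceptions.BadRequest)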
@@ -19143,29 +21507,25 @@ def test_update_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_aspect_type(request) + client.create_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateAspectTypeRequest, + catalog.CreateEntryGroupRequest, dict, ], ) -def test_update_aspect_type_rest_call_success(request_type): +def test_create_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } - request_init["aspect_type"] = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["entry_group"] = { + "name": "name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -19173,29 +21533,6 @@ def test_update_aspect_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, - "metadata_template": { - "index": 536, - "name": "name_value", - "type_": "type__value", - "record_fields": {}, - "enum_values": [ - {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} - ], - "map_items": {}, - "array_items": {}, - "type_id": "type_id_value", - "type_ref": "type_ref_value", - "constraints": {"required": True}, - "annotations": { - "deprecated": "deprecated_value", - "display_name": "display_name_value", - "description": "description_value", - "display_order": 1393, - "string_type": "string_type_value", - "string_values": ["string_values_value1", "string_values_value2"], - }, - }, "transfer_status": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
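The comment block this hunk interrupts explains the pruning loop that follows it: the sample entry_group/aspect_type dict was produced against the protos available at generation time, so any key that the installed runtime proto does not define must be deleted before request_type(**request_init) is called, or message construction would fail. A toy illustration of the pruning step, with made-up field names:

    # Toy version of the subfield pruning; field names are hypothetical.
    runtime_nested_fields = {("annotations", "display_name")}  # known to runtime

    request_init = {"annotations": {"display_name": "x", "future_field": "y"}}
    for field, value in request_init.items():
        if isinstance(value, dict):
            for subfield in list(value):
                if (field, subfield) not in runtime_nested_fields:
                    del value[subfield]  # runtime proto lacks this key

    assert request_init == {"annotations": {"display_name": "x"}}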
@@ -19203,7 +21540,7 @@ def test_update_aspect_type_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] + test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -19231,7 +21568,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER + for field, value in request_init["entry_group"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -19261,10 +21598,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] + for i in range(0, len(request_init["entry_group"][field])): + del request_init["entry_group"][field][i][subfield] else: - del request_init["aspect_type"][field][subfield] + del request_init["entry_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19279,14 +21616,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_aspect_type(request) + response = client.create_entry_group(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_aspect_type_rest_interceptors(null_interceptor): +def test_create_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19302,18 +21639,18 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_aspect_type" + transports.CatalogServiceRestInterceptor, "post_create_entry_group" ) as post, mock.patch.object( transports.CatalogServiceRestInterceptor, - "post_update_aspect_type_with_metadata", + "post_create_entry_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_create_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateAspectTypeRequest.pb( - catalog.UpdateAspectTypeRequest() + pb_message = catalog.CreateEntryGroupRequest.pb( + catalog.CreateEntryGroupRequest() ) transcode.return_value = { "method": "post", @@ -19328,7 +21665,7 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.UpdateAspectTypeRequest() + request = catalog.CreateEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19337,7 +21674,7 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_aspect_type( + client.create_entry_group( request, metadata=[ ("key", "val"), @@ -19350,14 +21687,18 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_aspect_type_rest_bad_request( - request_type=catalog.DeleteAspectTypeRequest, +def test_update_entry_group_rest_bad_request( + request_type=catalog.UpdateEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = { + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
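The *_rest_interceptors tests above pin down the hook contract for CatalogServiceRestInterceptor: pre_<method> receives and may rewrite (request, metadata), post_<method> receives the deserialized response, and post_<method>_with_metadata receives (response, metadata). A stripped-down model of that call order (toy classes, not the real transport):

    # Toy model of the pre/post interceptor ordering the tests assert on.
    class Interceptor:
        def pre(self, request, metadata):
            return request, metadata + [("injected", "1")]

        def post(self, response):
            return response

        def post_with_metadata(self, response, metadata):
            return response, metadata

    def invoke(icpt, request, metadata, send):
        request, metadata = icpt.pre(request, metadata)
        response = send(request)  # stands in for the HTTP round trip
        response = icpt.post(response)
        return icpt.post_with_metadata(response, metadata)

    resp, md = invoke(
        Interceptor(), {"name": "n"}, [("key", "val")], lambda r: {"ok": True}
    )
    assert ("injected", "1") in md and resp == {"ok": True}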
@@ -19372,23 +21713,105 @@ def test_delete_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_aspect_type(request) + client.update_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteAspectTypeRequest, + catalog.UpdateEntryGroupRequest, dict, ], ) -def test_delete_aspect_type_rest_call_success(request_type): +def test_update_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = { + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } + } + request_init["entry_group"] = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "transfer_status": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_group"][field])): + del request_init["entry_group"][field][i][subfield] + else: + del request_init["entry_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19403,14 +21826,14 @@ def test_delete_aspect_type_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_aspect_type(request) + response = client.update_entry_group(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_aspect_type_rest_interceptors(null_interceptor): +def test_update_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19426,18 +21849,18 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_aspect_type" + transports.CatalogServiceRestInterceptor, "post_update_entry_group" ) as post, mock.patch.object( transports.CatalogServiceRestInterceptor, - "post_delete_aspect_type_with_metadata", + "post_update_entry_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_update_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteAspectTypeRequest.pb( - catalog.DeleteAspectTypeRequest() + pb_message = catalog.UpdateEntryGroupRequest.pb( + catalog.UpdateEntryGroupRequest() ) transcode.return_value = { "method": "post", @@ -19452,7 +21875,7 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.DeleteAspectTypeRequest() + request = catalog.UpdateEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19461,7 +21884,7 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_aspect_type( + client.update_entry_group( request, metadata=[ ("key", "val"), @@ -19474,14 +21897,14 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_aspect_types_rest_bad_request( - request_type=catalog.ListAspectTypesRequest, +def test_delete_entry_group_rest_bad_request( + request_type=catalog.DeleteEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
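These interceptor tests also stub path_template.transcode, the api_core helper that maps a request message onto an HTTP method, expanded URI, body, and query params according to the service's http bindings; stubbing it lets the test drive the transport without real routing. A hedged sketch of what transcoding yields for a simple binding (the URI pattern below is illustrative, not the service's actual one):

    # Sketch: transcoding a GET whose `name` field is captured in the path.
    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/v1/{name=projects/*/locations/*/entryGroups/*}"}
    ]
    result = path_template.transcode(
        http_options, name="projects/p1/locations/l1/entryGroups/g1"
    )
    assert result["method"] == "get"
    assert result["uri"] == "/v1/projects/p1/locations/l1/entryGroups/g1"
    assert result["query_params"] == {}  # nothing left over for the query string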
@@ -19496,53 +21919,45 @@ def test_list_aspect_types_rest_bad_request(
         response_value.request = mock.Mock()
         req.return_value = response_value
         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_aspect_types(request)
+        client.delete_entry_group(request)


 @pytest.mark.parametrize(
     "request_type",
     [
-        catalog.ListAspectTypesRequest,
+        catalog.DeleteEntryGroupRequest,
         dict,
     ],
 )
-def test_list_aspect_types_rest_call_success(request_type):
+def test_delete_entry_group_rest_call_success(request_type):
     client = CatalogServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport="rest"
     )

     # send a request that will satisfy transcoding
-    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), "request") as req:
         # Designate an appropriate value for the returned response.
-        return_value = catalog.ListAspectTypesResponse(
-            next_page_token="next_page_token_value",
-            unreachable_locations=["unreachable_locations_value"],
-        )
+        return_value = operations_pb2.Operation(name="operations/spam")

         # Wrap the value into a proper Response obj
         response_value = mock.Mock()
         response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = catalog.ListAspectTypesResponse.pb(return_value)
         json_return_value = json_format.MessageToJson(return_value)
         response_value.content = json_return_value.encode("UTF-8")
         req.return_value = response_value
         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_aspect_types(request)
+        response = client.delete_entry_group(request)

     # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAspectTypesPager)
-    assert response.next_page_token == "next_page_token_value"
-    assert response.unreachable_locations == ["unreachable_locations_value"]
+    json_return_value = json_format.MessageToJson(return_value)


 @pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_aspect_types_rest_interceptors(null_interceptor):
+def test_delete_entry_group_rest_interceptors(null_interceptor):
     transport = transports.CatalogServiceRestTransport(
         credentials=ga_credentials.AnonymousCredentials(),
         interceptor=None
@@ -19556,16 +21971,21 @@ def test_list_aspect_types_rest_interceptors(null_interceptor):
     ) as req, mock.patch.object(
         path_template, "transcode"
     ) as transcode, mock.patch.object(
-        transports.CatalogServiceRestInterceptor, "post_list_aspect_types"
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.CatalogServiceRestInterceptor, "post_delete_entry_group"
     ) as post, mock.patch.object(
-        transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata"
+        transports.CatalogServiceRestInterceptor,
+        "post_delete_entry_group_with_metadata",
     ) as post_with_metadata, mock.patch.object(
-        transports.CatalogServiceRestInterceptor, "pre_list_aspect_types"
+        transports.CatalogServiceRestInterceptor, "pre_delete_entry_group"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
         post_with_metadata.assert_not_called()
-        pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest())
+        pb_message = catalog.DeleteEntryGroupRequest.pb(
+            catalog.DeleteEntryGroupRequest()
+        )
         transcode.return_value = {
             "method": "post",
             "uri": "my_uri",
@@ -19576,21 +21996,19 @@ def test_list_aspect_types_rest_interceptors(null_interceptor):
         req.return_value = mock.Mock()
         req.return_value.status_code = 200
         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = catalog.ListAspectTypesResponse.to_json(
-            catalog.ListAspectTypesResponse()
-        )
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

-        request = catalog.ListAspectTypesRequest()
+        request = catalog.DeleteEntryGroupRequest()
         metadata = [
             ("key", "val"),
             ("cephalopod", "squid"),
         ]
         pre.return_value = request, metadata
-        post.return_value = catalog.ListAspectTypesResponse()
-        post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata

-        client.list_aspect_types(
+        client.delete_entry_group(
             request,
             metadata=[
                 ("key", "val"),
@@ -19603,12 +22021,14 @@ def test_list_aspect_types_rest_interceptors(null_interceptor):
     post_with_metadata.assert_called_once()


-def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest):
+def test_list_entry_groups_rest_bad_request(
+    request_type=catalog.ListEntryGroupsRequest,
+):
     client = CatalogServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport="rest"
     )
     # send a request that will satisfy transcoding
-    request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"}
+    request_init = {"parent": "projects/sample1/locations/sample2"}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a BadRequest error.
@@ -19623,35 +22043,31 @@ def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequ
         response_value.request = mock.Mock()
         req.return_value = response_value
         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_aspect_type(request)
+        client.list_entry_groups(request)


 @pytest.mark.parametrize(
     "request_type",
     [
-        catalog.GetAspectTypeRequest,
+        catalog.ListEntryGroupsRequest,
         dict,
     ],
 )
-def test_get_aspect_type_rest_call_success(request_type):
+def test_list_entry_groups_rest_call_success(request_type):
     client = CatalogServiceClient(
         credentials=ga_credentials.AnonymousCredentials(), transport="rest"
     )

     # send a request that will satisfy transcoding
-    request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"}
+    request_init = {"parent": "projects/sample1/locations/sample2"}
     request = request_type(**request_init)

     # Mock the http request call within the method and fake a response.
     with mock.patch.object(type(client.transport._session), "request") as req:
         # Designate an appropriate value for the returned response.
-        return_value = catalog.AspectType(
-            name="name_value",
-            uid="uid_value",
-            description="description_value",
-            display_name="display_name_value",
-            etag="etag_value",
-            transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED,
+        return_value = catalog.ListEntryGroupsResponse(
+            next_page_token="next_page_token_value",
+            unreachable_locations=["unreachable_locations_value"],
         )

         # Wrap the value into a proper Response obj
@@ -19659,25 +22075,21 @@ def test_get_aspect_type_rest_call_success(request_type):
         response_value.status_code = 200

         # Convert return value to protobuf type
-        return_value = catalog.AspectType.pb(return_value)
+        return_value = catalog.ListEntryGroupsResponse.pb(return_value)
         json_return_value = json_format.MessageToJson(return_value)
         response_value.content = json_return_value.encode("UTF-8")
         req.return_value = response_value
         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_aspect_type(request)
+        response = client.list_entry_groups(request)

     # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.AspectType) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + assert isinstance(response, pagers.ListEntryGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_aspect_type_rest_interceptors(null_interceptor): +def test_list_entry_groups_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19691,16 +22103,16 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_aspect_type" + transports.CatalogServiceRestInterceptor, "post_list_entry_groups" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_list_entry_groups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest()) + pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19711,19 +22123,21 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.AspectType.to_json(catalog.AspectType()) + return_value = catalog.ListEntryGroupsResponse.to_json( + catalog.ListEntryGroupsResponse() + ) req.return_value.content = return_value - request = catalog.GetAspectTypeRequest() + request = catalog.ListEntryGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.AspectType() - post_with_metadata.return_value = catalog.AspectType(), metadata + post.return_value = catalog.ListEntryGroupsResponse() + post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata - client.get_aspect_type( + client.list_entry_groups( request, metadata=[ ("key", "val"), @@ -19736,14 +22150,12 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_entry_group_rest_bad_request( - request_type=catalog.CreateEntryGroupRequest, -): +def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
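Two mechanics in the success test above are easy to miss. First, `catalog.ListEntryGroupsResponse.pb(...)` unwraps the proto-plus message into the raw protobuf that `json_format.MessageToJson` accepts; the proto-plus wrapper itself would be rejected. Second, the client does not hand back that response directly but wraps it in a `pagers.ListEntryGroupsPager`, which proxies fields such as `next_page_token` and fetches follow-up pages during iteration. A hedged sketch of both, assuming the package's usual `types` module path:

from google.cloud.dataplex_v1.types import catalog
from google.protobuf import json_format

# What the mocked transport serves back over HTTP:
wrapped = catalog.ListEntryGroupsResponse(next_page_token="next_page_token_value")
raw = catalog.ListEntryGroupsResponse.pb(wrapped)  # proto-plus -> raw protobuf
payload = json_format.MessageToJson(raw)

# What the caller sees, given a CatalogServiceClient as in the tests:
# pager = client.list_entry_groups(
#     request={"parent": "projects/sample1/locations/sample2"}
# )
# for entry_group in pager:  # iteration may issue further list requests
#     print(entry_group.name)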
@@ -19758,123 +22170,61 @@ def test_create_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_group(request) + client.get_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateEntryGroupRequest, + catalog.GetEntryGroupRequest, dict, ], ) -def test_create_entry_group_rest_call_success(request_type): +def test_get_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["entry_group"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "transfer_status": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] - else: - del request_init["entry_group"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryGroup( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_group(request) + response = client.get_entry_group(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.EntryGroup) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_group_rest_interceptors(null_interceptor): +def test_get_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19888,21 +22238,16 @@ def test_create_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_group" + transports.CatalogServiceRestInterceptor, "post_get_entry_group" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_entry_group_with_metadata", + transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry_group" + transports.CatalogServiceRestInterceptor, "pre_get_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryGroupRequest.pb( - catalog.CreateEntryGroupRequest() - ) + pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19913,19 +22258,19 @@ def test_create_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = 
catalog.EntryGroup.to_json(catalog.EntryGroup()) req.return_value.content = return_value - request = catalog.CreateEntryGroupRequest() + request = catalog.GetEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.EntryGroup() + post_with_metadata.return_value = catalog.EntryGroup(), metadata - client.create_entry_group( + client.get_entry_group( request, metadata=[ ("key", "val"), @@ -19938,18 +22283,12 @@ def test_create_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_group_rest_bad_request( - request_type=catalog.UpdateEntryGroupRequest, -): +def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19964,44 +22303,50 @@ def test_update_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_group(request) + client.create_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateEntryGroupRequest, + catalog.CreateEntryRequest, dict, ], ) -def test_update_entry_group_rest_call_success(request_type): +def test_create_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } - request_init["entry_group"] = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3", - "uid": "uid_value", + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init["entry"] = { + "name": "name_value", + "entry_type": "entry_type_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "transfer_status": 1, + "aspects": {}, + "parent_entry": "parent_entry_value", + "fully_qualified_name": "fully_qualified_name_value", + "entry_source": { + "resource": "resource_value", + "system": "system_value", + "platform": "platform_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "ancestors": [{"name": "name_value", "type_": "type__value"}], + "create_time": {}, + "update_time": {}, + "location": "location_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] + test_field = catalog.CreateEntryRequest.meta.fields["entry"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20029,7 +22374,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER + for field, value in request_init["entry"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20059,32 +22404,44 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] else: - del request_init["entry_group"][field][subfield] + del request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_group(request) + response = client.create_entry(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.Entry) + assert response.name == "name_value" + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_group_rest_interceptors(null_interceptor): +def test_create_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20098,21 +22455,16 @@ def test_update_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_group" + transports.CatalogServiceRestInterceptor, "post_create_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_update_entry_group_with_metadata", + transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry_group" + transports.CatalogServiceRestInterceptor, "pre_create_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryGroupRequest.pb( - catalog.UpdateEntryGroupRequest() - ) + pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20123,19 +22475,19 @@ def test_update_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.UpdateEntryGroupRequest() + request = catalog.CreateEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata - client.update_entry_group( + client.create_entry( request, metadata=[ ("key", "val"), @@ -20148,14 +22500,16 @@ def test_update_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_entry_group_rest_bad_request( - request_type=catalog.DeleteEntryGroupRequest, -): +def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
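The long preamble in the create_entry success test above guards against version skew: subfields present in the generated sample request but missing from the proto library installed at test time are deleted before the request is built (see the linked gapic-generator-python issue 1748). The core of that pruning, reduced to a self-contained toy example in which `ghost_field` stands in for a hypothetical stale subfield:

# Pairs of (field, subfield) that the runtime proto library still defines.
runtime_nested_fields = {("entry_source", "resource")}
request_init = {"entry": {"entry_source": {"resource": "r", "ghost_field": "x"}}}

for field, value in request_init["entry"].items():
    if isinstance(value, dict):
        for subfield in list(value):  # copy the keys; we mutate while iterating
            if (field, subfield) not in runtime_nested_fields:
                del value[subfield]

assert request_init == {"entry": {"entry_source": {"resource": "r"}}}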
@@ -20170,155 +22524,125 @@ def test_delete_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_group(request) + client.update_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteEntryGroupRequest, + catalog.UpdateEntryRequest, dict, ], ) -def test_delete_entry_group_rest_call_success(request_type): +def test_update_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_group(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_group" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_delete_entry_group_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry_group" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryGroupRequest.pb( - catalog.DeleteEntryGroupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, + request_init = { + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } + } + request_init["entry"] = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4", + "entry_type": "entry_type_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "aspects": {}, + "parent_entry": "parent_entry_value", + "fully_qualified_name": "fully_qualified_name_value", + "entry_source": { + "resource": "resource_value", + "system": "system_value", + "platform": "platform_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "ancestors": [{"name": "name_value", 
"type_": "type__value"}], + "create_time": {}, + "update_time": {}, + "location": "location_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.DeleteEntryGroupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_entry_group( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryRequest.meta.fields["entry"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_list_entry_groups_rest_bad_request( - request_type=catalog.ListEntryGroupsRequest, -): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_groups(request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -@pytest.mark.parametrize( - "request_type", - [ - catalog.ListEntryGroupsRequest, - dict, - ], -) -def test_list_entry_groups_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] + else: + del request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntryGroupsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], + return_value = catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", ) # Wrap the value into a proper Response obj @@ -20326,21 +22650,23 @@ def test_list_entry_groups_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_groups(request) + response = client.update_entry(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + assert isinstance(response, catalog.Entry) + assert response.name == "name_value" + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_groups_rest_interceptors(null_interceptor): +def test_update_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20354,16 +22680,16 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_groups" + transports.CatalogServiceRestInterceptor, "post_update_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata" + transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_entry_groups" + transports.CatalogServiceRestInterceptor, "pre_update_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) + pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20374,21 +22700,19 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntryGroupsResponse.to_json( - catalog.ListEntryGroupsResponse() - ) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.ListEntryGroupsRequest() + request = catalog.UpdateEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntryGroupsResponse() - post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata + post.return_value = catalog.Entry() 
+ post_with_metadata.return_value = catalog.Entry(), metadata - client.list_entry_groups( + client.update_entry( request, metadata=[ ("key", "val"), @@ -20401,12 +22725,14 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): +def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20421,35 +22747,35 @@ def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_group(request) + client.delete_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryGroupRequest, + catalog.DeleteEntryRequest, dict, ], ) -def test_get_entry_group_rest_call_success(request_type): +def test_delete_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup( + return_value = catalog.Entry( name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", ) # Wrap the value into a proper Response obj @@ -20457,25 +22783,23 @@ def test_get_entry_group_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_group(request) + response = client.delete_entry(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.EntryGroup) + assert isinstance(response, catalog.Entry) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_group_rest_interceptors(null_interceptor): +def test_delete_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20489,16 +22813,16 @@ def test_get_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_group" + transports.CatalogServiceRestInterceptor, "post_delete_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata" + transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry_group" + transports.CatalogServiceRestInterceptor, "pre_delete_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest()) + pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20509,19 +22833,19 @@ def test_get_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryGroup.to_json(catalog.EntryGroup()) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.GetEntryGroupRequest() + request = catalog.DeleteEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.EntryGroup() - post_with_metadata.return_value = catalog.EntryGroup(), metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata - client.get_entry_group( + client.delete_entry( request, metadata=[ ("key", "val"), @@ -20534,7 +22858,7 @@ def test_get_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): +def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20551,124 +22875,33 @@ def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry(request) - - -@pytest.mark.parametrize( - 
"request_type", - [ - catalog.CreateEntryRequest, - dict, - ], -) -def test_create_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} - request_init["entry"] = { - "name": "name_value", - "entry_type": "entry_type_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "aspects": {}, - "parent_entry": "parent_entry_value", - "fully_qualified_name": "fully_qualified_name_value", - "entry_source": { - "resource": "resource_value", - "system": "system_value", - "platform": "platform_value", - "display_name": "display_name_value", - "description": "description_value", - "labels": {}, - "ancestors": [{"name": "name_value", "type_": "type__value"}], - "create_time": {}, - "update_time": {}, - "location": "location_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entries(request) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del request_init["entry"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + catalog.ListEntriesRequest, + dict, + ], +) +def test_list_entries_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", + return_value = catalog.ListEntriesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -20676,23 +22909,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry(request) + response = client.list_entries(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.Entry) - assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + assert isinstance(response, pagers.ListEntriesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_rest_interceptors(null_interceptor): +def test_list_entries_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20706,16 +22936,16 @@ def test_create_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry" + transports.CatalogServiceRestInterceptor, "post_list_entries" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry" + transports.CatalogServiceRestInterceptor, "pre_list_entries" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) + pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20726,19 +22956,21 @@ def test_create_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = catalog.ListEntriesResponse.to_json( + catalog.ListEntriesResponse() + ) req.return_value.content = return_value - request = catalog.CreateEntryRequest() + request = catalog.ListEntriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = catalog.ListEntriesResponse() + post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata - client.create_entry( + client.list_entries( request, metadata=[ ("key", "val"), @@ -20751,15 +22983,13 @@ def test_create_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): +def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } request = request_type(**request_init) @@ -20775,115 +23005,25 @@ def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry(request) + client.get_entry(request) @pytest.mark.parametrize( 
"request_type", [ - catalog.UpdateEntryRequest, + catalog.GetEntryRequest, dict, ], ) -def test_update_entry_rest_call_success(request_type): +def test_get_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } - } - request_init["entry"] = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4", - "entry_type": "entry_type_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "aspects": {}, - "parent_entry": "parent_entry_value", - "fully_qualified_name": "fully_qualified_name_value", - "entry_source": { - "resource": "resource_value", - "system": "system_value", - "platform": "platform_value", - "display_name": "display_name_value", - "description": "description_value", - "labels": {}, - "ancestors": [{"name": "name_value", "type_": "type__value"}], - "create_time": {}, - "update_time": {}, - "location": "location_value", - }, + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20906,7 +23046,7 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry(request) + response = client.get_entry(request) # Establish that the response is the type that we expect. 
assert isinstance(response, catalog.Entry) @@ -20917,7 +23057,7 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_rest_interceptors(null_interceptor): +def test_get_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20931,16 +23071,16 @@ def test_update_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry" + transports.CatalogServiceRestInterceptor, "post_get_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry" + transports.CatalogServiceRestInterceptor, "pre_get_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) + pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20954,7 +23094,7 @@ def test_update_entry_rest_interceptors(null_interceptor): return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.UpdateEntryRequest() + request = catalog.GetEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20963,7 +23103,7 @@ def test_update_entry_rest_interceptors(null_interceptor): post.return_value = catalog.Entry() post_with_metadata.return_value = catalog.Entry(), metadata - client.update_entry( + client.get_entry( request, metadata=[ ("key", "val"), @@ -20976,14 +23116,12 @@ def test_update_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): +def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
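The `pre_*`/`post_*` hooks mocked in the interceptor tests above are the REST transport's public extension points. A minimal sketch of wiring one into a client, assuming application-default credentials and illustrative logging behaviour:

from google.cloud import dataplex_v1
from google.cloud.dataplex_v1.services.catalog_service import transports


class LoggingInterceptor(transports.CatalogServiceRestInterceptor):
    # Mirrors the hooks patched in test_get_entry_rest_interceptors.

    def pre_get_entry(self, request, metadata):
        # Runs before transcoding and sending; may rewrite request/metadata.
        print(f"get_entry -> {request.name}")
        return request, metadata

    def post_get_entry(self, response):
        # Runs after a successful call; may replace the response object.
        return response


transport = transports.CatalogServiceRestTransport(
    interceptor=LoggingInterceptor(),
)
client = dataplex_v1.CatalogServiceClient(transport=transport)
entry = client.get_entry(
    name="projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry"
)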
@@ -20998,25 +23136,23 @@ def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry(request) + client.lookup_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteEntryRequest, + catalog.LookupEntryRequest, dict, ], ) -def test_delete_entry_rest_call_success(request_type): +def test_lookup_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21039,7 +23175,7 @@ def test_delete_entry_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry(request) + response = client.lookup_entry(request) # Establish that the response is the type that we expect. assert isinstance(response, catalog.Entry) @@ -21050,7 +23186,7 @@ def test_delete_entry_rest_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_rest_interceptors(null_interceptor): +def test_lookup_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21064,16 +23200,16 @@ def test_delete_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry" + transports.CatalogServiceRestInterceptor, "post_lookup_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry" + transports.CatalogServiceRestInterceptor, "pre_lookup_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) + pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21087,7 +23223,7 @@ def test_delete_entry_rest_interceptors(null_interceptor): return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.DeleteEntryRequest() + request = catalog.LookupEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21096,7 +23232,7 @@ def test_delete_entry_rest_interceptors(null_interceptor): post.return_value = catalog.Entry() post_with_metadata.return_value = catalog.Entry(), metadata - client.delete_entry( + client.lookup_entry( request, metadata=[ ("key", "val"), @@ -21109,12 +23245,12 @@ def test_delete_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): +def 
test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21129,30 +23265,32 @@ def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entries(request) + client.search_entries(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListEntriesRequest, + catalog.SearchEntriesRequest, dict, ], ) -def test_list_entries_rest_call_success(request_type): +def test_search_entries_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse( + return_value = catalog.SearchEntriesResponse( + total_size=1086, next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -21160,20 +23298,22 @@ def test_list_entries_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entries(request) + response = client.search_entries(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEntriesPager) + assert isinstance(response, pagers.SearchEntriesPager) + assert response.total_size == 1086 assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entries_rest_interceptors(null_interceptor): +def test_search_entries_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21187,16 +23327,16 @@ def test_list_entries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entries" + transports.CatalogServiceRestInterceptor, "post_search_entries" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata" + transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_entries" + transports.CatalogServiceRestInterceptor, "pre_search_entries" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) + pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21207,21 +23347,21 @@ def test_list_entries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntriesResponse.to_json( - catalog.ListEntriesResponse() + return_value = catalog.SearchEntriesResponse.to_json( + catalog.SearchEntriesResponse() ) req.return_value.content = return_value - request = catalog.ListEntriesRequest() + request = catalog.SearchEntriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntriesResponse() - post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata + post.return_value = catalog.SearchEntriesResponse() + post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata - client.list_entries( + client.search_entries( request, metadata=[ ("key", "val"), @@ -21234,14 +23374,14 @@ def test_list_entries_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): +def test_create_metadata_job_rest_bad_request( + request_type=catalog.CreateMetadataJobRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
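test_search_entries_rest_call_success asserts the SearchEntriesPager wrapper rather than the raw response; the pager fetches follow-up pages lazily as it is iterated. A usage sketch with illustrative scope and query values:

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

pager = client.search_entries(
    request=dataplex_v1.SearchEntriesRequest(
        name="projects/my-project/locations/global",  # illustrative search scope
        query="displayname:orders",                   # illustrative query
    )
)
for result in pager:  # transparently follows next_page_token
    print(result.dataplex_entry.name)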
@@ -21256,59 +23396,172 @@ def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry(request) + client.create_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryRequest, + catalog.CreateMetadataJobRequest, dict, ], ) -def test_get_entry_rest_call_success(request_type): +def test_create_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["metadata_job"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "type_": 1, + "import_spec": { + "source_storage_uri": "source_storage_uri_value", + "source_create_time": {}, + "scope": { + "entry_groups": ["entry_groups_value1", "entry_groups_value2"], + "entry_types": ["entry_types_value1", "entry_types_value2"], + "aspect_types": ["aspect_types_value1", "aspect_types_value2"], + "glossaries": ["glossaries_value1", "glossaries_value2"], + "entry_link_types": [ + "entry_link_types_value1", + "entry_link_types_value2", + ], + "referenced_entry_scopes": [ + "referenced_entry_scopes_value1", + "referenced_entry_scopes_value2", + ], + }, + "entry_sync_mode": 1, + "aspect_sync_mode": 1, + "log_level": 1, + }, + "export_spec": { + "scope": { + "organization_level": True, + "projects": ["projects_value1", "projects_value2"], + "entry_groups": ["entry_groups_value1", "entry_groups_value2"], + "entry_types": ["entry_types_value1", "entry_types_value2"], + "aspect_types": ["aspect_types_value1", "aspect_types_value2"], + }, + "output_path": "output_path_value", + }, + "import_result": { + "deleted_entries": 1584, + "updated_entries": 1600, + "created_entries": 1585, + "unchanged_entries": 1798, + "recreated_entries": 1800, + "update_time": {}, + "deleted_entry_links": 2024, + "created_entry_links": 2025, + "unchanged_entry_links": 2238, + }, + "export_result": { + "exported_entries": 1732, + "error_message": "error_message_value", + }, + "status": { + "state": 1, + "message": "message_value", + "completion_percent": 1930, + "update_time": {}, + }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_job"][field])): + del request_init["metadata_job"][field][i][subfield] + else: + del request_init["metadata_job"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry(request) + response = client.create_metadata_job(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.Entry) - assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_rest_interceptors(null_interceptor): +def test_create_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21322,16 +23575,21 @@ def test_get_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_metadata_job" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_create_metadata_job_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry" + transports.CatalogServiceRestInterceptor, "pre_create_metadata_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) + pb_message = catalog.CreateMetadataJobRequest.pb( + catalog.CreateMetadataJobRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21342,19 +23600,19 @@ def test_get_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.GetEntryRequest() + request = catalog.CreateMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_entry( + client.create_metadata_job( request, metadata=[ ("key", "val"), @@ -21367,12 +23625,12 @@ def test_get_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): +def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
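The create_metadata_job success test stubs the raw operations_pb2.Operation; in real use the method returns a long-running operation wrapper whose result() blocks until the job finishes. A sketch, assuming the standard flattened signature and illustrative resource names:

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

operation = client.create_metadata_job(
    parent="projects/my-project/locations/us-central1",  # illustrative
    metadata_job=dataplex_v1.MetadataJob(
        type_=dataplex_v1.MetadataJob.Type.IMPORT,
        import_spec=dataplex_v1.MetadataJob.ImportJobSpec(
            source_storage_uri="gs://my-bucket/metadata/",  # illustrative
            scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
                entry_groups=[
                    "projects/my-project/locations/us-central1/entryGroups/my-group"
                ],
            ),
        ),
    ),
)
job = operation.result()  # polls until the MetadataJob reaches a terminal state
print(job.status.state)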
@@ -21387,33 +23645,32 @@ def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lookup_entry(request) + client.get_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.LookupEntryRequest, + catalog.GetMetadataJobRequest, dict, ], ) -def test_lookup_entry_rest_call_success(request_type): +def test_get_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry( + return_value = catalog.MetadataJob( name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, ) # Wrap the value into a proper Response obj @@ -21421,23 +23678,22 @@ def test_lookup_entry_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.MetadataJob.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lookup_entry(request) + response = client.get_metadata_job(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.Entry) + assert isinstance(response, catalog.MetadataJob) assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_entry_rest_interceptors(null_interceptor): +def test_get_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21451,16 +23707,16 @@ def test_lookup_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_lookup_entry" + transports.CatalogServiceRestInterceptor, "post_get_metadata_job" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_lookup_entry" + transports.CatalogServiceRestInterceptor, "pre_get_metadata_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) + pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21471,19 +23727,19 @@ def test_lookup_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) req.return_value.content = return_value - request = catalog.LookupEntryRequest() + request = catalog.GetMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = catalog.MetadataJob() + post_with_metadata.return_value = catalog.MetadataJob(), metadata - client.lookup_entry( + client.get_metadata_job( request, metadata=[ ("key", "val"), @@ -21496,12 +23752,14 @@ def test_lookup_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): +def test_list_metadata_jobs_rest_bad_request( + request_type=catalog.ListMetadataJobsRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
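get_metadata_job returns the job resource itself rather than an operation, so callers can poll it directly. The import_result message exercised elsewhere in this patch now carries the new entry-link counters; a brief sketch with an illustrative job name:

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

job = client.get_metadata_job(
    name="projects/my-project/locations/us-central1/metadataJobs/my-job"  # illustrative
)
if job.type_ == dataplex_v1.MetadataJob.Type.IMPORT:
    # created_entry_links / deleted_entry_links / unchanged_entry_links are
    # the counters introduced by this change.
    print(job.import_result.created_entries, job.import_result.created_entry_links)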
@@ -21516,32 +23774,31 @@ def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_entries(request) + client.list_metadata_jobs(request) @pytest.mark.parametrize( "request_type", [ - catalog.SearchEntriesRequest, + catalog.ListMetadataJobsRequest, dict, ], ) -def test_search_entries_rest_call_success(request_type): +def test_list_metadata_jobs_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse( - total_size=1086, + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -21549,22 +23806,21 @@ def test_search_entries_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListMetadataJobsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_entries(request) + response = client.list_metadata_jobs(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchEntriesPager) - assert response.total_size == 1086 + assert isinstance(response, pagers.ListMetadataJobsPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_entries_rest_interceptors(null_interceptor): +def test_list_metadata_jobs_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21578,16 +23834,19 @@ def test_search_entries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_search_entries" + transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_list_metadata_jobs_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_search_entries" + transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) + pb_message = catalog.ListMetadataJobsRequest.pb( + catalog.ListMetadataJobsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21598,21 +23857,21 @@ def test_search_entries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.SearchEntriesResponse.to_json( - catalog.SearchEntriesResponse() + return_value = catalog.ListMetadataJobsResponse.to_json( + catalog.ListMetadataJobsResponse() ) req.return_value.content = return_value - request = catalog.SearchEntriesRequest() + request = catalog.ListMetadataJobsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.SearchEntriesResponse() - post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata + post.return_value = catalog.ListMetadataJobsResponse() + post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata - client.search_entries( + client.list_metadata_jobs( request, metadata=[ ("key", "val"), @@ -21625,14 +23884,14 @@ def test_search_entries_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_metadata_job_rest_bad_request( - request_type=catalog.CreateMetadataJobRequest, +def test_cancel_metadata_job_rest_bad_request( + request_type=catalog.CancelMetadataJobRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
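Like the other list methods, list_metadata_jobs returns a pager, and the unreachable_locations field asserted above reports regions that could not be listed instead of failing the whole call. A sketch, assuming the documented filter request field (filter syntax illustrative):

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

pager = client.list_metadata_jobs(
    request=dataplex_v1.ListMetadataJobsRequest(
        parent="projects/my-project/locations/us-central1",  # illustrative
        filter="type = IMPORT",  # illustrative; see the API reference for syntax
    )
)
for job in pager:
    print(job.name, job.status.state)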
@@ -21647,160 +23906,45 @@ def test_create_metadata_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_metadata_job(request) + client.cancel_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateMetadataJobRequest, + catalog.CancelMetadataJobRequest, dict, ], ) -def test_create_metadata_job_rest_call_success(request_type): +def test_cancel_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["metadata_job"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "type_": 1, - "import_spec": { - "source_storage_uri": "source_storage_uri_value", - "source_create_time": {}, - "scope": { - "entry_groups": ["entry_groups_value1", "entry_groups_value2"], - "entry_types": ["entry_types_value1", "entry_types_value2"], - "aspect_types": ["aspect_types_value1", "aspect_types_value2"], - }, - "entry_sync_mode": 1, - "aspect_sync_mode": 1, - "log_level": 1, - }, - "export_spec": { - "scope": { - "organization_level": True, - "projects": ["projects_value1", "projects_value2"], - "entry_groups": ["entry_groups_value1", "entry_groups_value2"], - "entry_types": ["entry_types_value1", "entry_types_value2"], - "aspect_types": ["aspect_types_value1", "aspect_types_value2"], - }, - "output_path": "output_path_value", - }, - "import_result": { - "deleted_entries": 1584, - "updated_entries": 1600, - "created_entries": 1585, - "unchanged_entries": 1798, - "recreated_entries": 1800, - "update_time": {}, - }, - "export_result": { - "exported_entries": 1732, - "error_message": "error_message_value", - }, - "status": { - "state": 1, - "message": "message_value", - "completion_percent": 1930, - "update_time": {}, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["metadata_job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["metadata_job"][field])): - del request_init["metadata_job"][field][i][subfield] - else: - del request_init["metadata_job"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_metadata_job(request) + response = client.cancel_metadata_job(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_metadata_job_rest_interceptors(null_interceptor): +def test_cancel_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21814,20 +23958,11 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_metadata_job" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_metadata_job_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_metadata_job" + transports.CatalogServiceRestInterceptor, "pre_cancel_metadata_job" ) as pre: pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateMetadataJobRequest.pb( - catalog.CreateMetadataJobRequest() + pb_message = catalog.CancelMetadataJobRequest.pb( + catalog.CancelMetadataJobRequest() ) transcode.return_value = { "method": "post", @@ -21839,19 +23974,15 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - request = catalog.CreateMetadataJobRequest() + request = catalog.CancelMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_metadata_job( + client.cancel_metadata_job( request, metadata=[ ("key", "val"), @@ -21860,56 +23991,129 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): +def test_create_entry_link_rest_bad_request( + request_type=catalog.CreateEntryLinkRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_metadata_job(request) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_link(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryLinkRequest, + dict, + ], +) +def test_create_entry_link_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init["entry_link"] = { + "name": "name_value", + "entry_link_type": "entry_link_type_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "entry_references": [{"name": "name_value", "path": "path_value", "type_": 2}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryLinkRequest.meta.fields["entry_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - catalog.GetMetadataJobRequest, - dict, - ], -) -def test_get_metadata_job_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields 
from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_link"][field])): + del request_init["entry_link"][field][i][subfield] + else: + del request_init["entry_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob( + return_value = catalog.EntryLink( name="name_value", - uid="uid_value", - type_=catalog.MetadataJob.Type.IMPORT, + entry_link_type="entry_link_type_value", ) # Wrap the value into a proper Response obj @@ -21917,22 +24121,21 @@ def test_get_metadata_job_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) + return_value = catalog.EntryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_metadata_job(request) + response = client.create_entry_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, catalog.MetadataJob) + assert isinstance(response, catalog.EntryLink) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.type_ == catalog.MetadataJob.Type.IMPORT + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_metadata_job_rest_interceptors(null_interceptor): +def test_create_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21946,16 +24149,16 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_metadata_job" + transports.CatalogServiceRestInterceptor, "post_create_entry_link" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata" + transports.CatalogServiceRestInterceptor, "post_create_entry_link_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_metadata_job" + transports.CatalogServiceRestInterceptor, "pre_create_entry_link" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) + pb_message = catalog.CreateEntryLinkRequest.pb(catalog.CreateEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21966,19 +24169,19 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = 
{"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) req.return_value.content = return_value - request = catalog.GetMetadataJobRequest() + request = catalog.CreateEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.MetadataJob() - post_with_metadata.return_value = catalog.MetadataJob(), metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.get_metadata_job( + client.create_entry_link( request, metadata=[ ("key", "val"), @@ -21991,14 +24194,16 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_metadata_jobs_rest_bad_request( - request_type=catalog.ListMetadataJobsRequest, +def test_delete_entry_link_rest_bad_request( + request_type=catalog.DeleteEntryLinkRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22013,31 +24218,33 @@ def test_list_metadata_jobs_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_metadata_jobs(request) + client.delete_entry_link(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListMetadataJobsRequest, + catalog.DeleteEntryLinkRequest, dict, ], ) -def test_list_metadata_jobs_rest_call_success(request_type): +def test_delete_entry_link_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], + return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", ) # Wrap the value into a proper Response obj @@ -22045,21 +24252,21 @@ def test_list_metadata_jobs_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.EntryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_metadata_jobs(request) + response = client.delete_entry_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListMetadataJobsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_metadata_jobs_rest_interceptors(null_interceptor): +def test_delete_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22073,19 +24280,16 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs" + transports.CatalogServiceRestInterceptor, "post_delete_entry_link" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_list_metadata_jobs_with_metadata", + transports.CatalogServiceRestInterceptor, "post_delete_entry_link_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs" + transports.CatalogServiceRestInterceptor, "pre_delete_entry_link" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListMetadataJobsRequest.pb( - catalog.ListMetadataJobsRequest() - ) + pb_message = catalog.DeleteEntryLinkRequest.pb(catalog.DeleteEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22096,21 +24300,19 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListMetadataJobsResponse.to_json( - catalog.ListMetadataJobsResponse() - ) + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) req.return_value.content = return_value - request = catalog.ListMetadataJobsRequest() + request = catalog.DeleteEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListMetadataJobsResponse() - post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.list_metadata_jobs( + client.delete_entry_link( request, metadata=[ ("key", "val"), @@ -22123,14 +24325,14 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_cancel_metadata_job_rest_bad_request( - request_type=catalog.CancelMetadataJobRequest, -): +def test_get_entry_link_rest_bad_request(request_type=catalog.GetEntryLinkRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
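The new entry-link tests above exercise CreateEntryLink and DeleteEntryLink, both of which return the EntryLink resource. A creation sketch, assuming the standard flattened signature; the entry_link_type and entry names are illustrative, and the EntryReference.Type values follow the request_init used in the tests (type_: 2):

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

link = client.create_entry_link(
    parent="projects/my-project/locations/global/entryGroups/my-group",
    entry_link_id="my-link",
    entry_link=dataplex_v1.EntryLink(
        entry_link_type="projects/my-project/locations/global/entryLinkTypes/synonym",
        entry_references=[
            dataplex_v1.EntryLink.EntryReference(
                name="projects/my-project/locations/global/entryGroups/my-group/entries/entry-a",
                type_=dataplex_v1.EntryLink.EntryReference.Type.SOURCE,
            ),
            dataplex_v1.EntryLink.EntryReference(
                name="projects/my-project/locations/global/entryGroups/my-group/entries/entry-b",
                type_=dataplex_v1.EntryLink.EntryReference.Type.TARGET,
            ),
        ],
    ),
)
print(link.name)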
@@ -22145,45 +24347,55 @@ def test_cancel_metadata_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_metadata_job(request) + client.get_entry_link(request) @pytest.mark.parametrize( "request_type", [ - catalog.CancelMetadataJobRequest, + catalog.GetEntryLinkRequest, dict, ], ) -def test_cancel_metadata_job_rest_call_success(request_type): +def test_get_entry_link_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_metadata_job(request) + response = client.get_entry_link(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_metadata_job_rest_interceptors(null_interceptor): +def test_get_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22197,12 +24409,16 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_cancel_metadata_job" + transports.CatalogServiceRestInterceptor, "post_get_entry_link" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_link_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_get_entry_link" ) as pre: pre.assert_not_called() - pb_message = catalog.CancelMetadataJobRequest.pb( - catalog.CancelMetadataJobRequest() - ) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetEntryLinkRequest.pb(catalog.GetEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22213,15 +24429,19 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) + req.return_value.content = return_value - request = catalog.CancelMetadataJobRequest() + request = catalog.GetEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.cancel_metadata_job( + client.get_entry_link( request, metadata=[ ("key", "val"), @@ -22230,6 +24450,8 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): @@ -23155,6 +25377,70 @@ def test_cancel_metadata_job_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + def test_catalog_service_rest_lro_client(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23231,6 +25517,9 @@ def test_catalog_service_base_transport(): "get_metadata_job", "list_metadata_jobs", "cancel_metadata_job", + "create_entry_link", + "delete_entry_link", + "get_entry_link", "get_location", "list_locations", "get_operation", @@ -23578,6 +25867,15 @@ def test_catalog_service_client_transport_session_collision(transport_name): session1 = client1.transport.cancel_metadata_job._session session2 = client2.transport.cancel_metadata_job._session assert session1 != session2 + session1 = client1.transport.create_entry_link._session + session2 = client2.transport.create_entry_link._session + assert session1 != session2 + session1 = client1.transport.delete_entry_link._session + session2 = client2.transport.delete_entry_link._session + assert session1 != session2 + session1 = client1.transport.get_entry_link._session + session2 = client2.transport.get_entry_link._session + assert session1 != session2 def test_catalog_service_grpc_transport_channel(): @@ -23825,10 +26123,41 @@ def test_parse_entry_group_path(): assert expected == actual -def test_entry_type_path(): +def test_entry_link_path(): project = "winkle" location = "nautilus" - entry_type = "scallop" + entry_group = "scallop" + entry_link = "abalone" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format( + project=project, + location=location, + entry_group=entry_group, + entry_link=entry_link, + ) + actual = CatalogServiceClient.entry_link_path( + project, location, entry_group, entry_link + ) + assert expected == actual + + +def test_parse_entry_link_path(): + expected = { + "project": "squid", + "location": "clam", + "entry_group": "whelk", + "entry_link": "octopus", + } + path = CatalogServiceClient.entry_link_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CatalogServiceClient.parse_entry_link_path(path) + assert expected == actual + + +def test_entry_type_path(): + project = "oyster" + location = "nudibranch" + entry_type = "cuttlefish" expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format( project=project, location=location, @@ -23840,9 +26169,9 @@ def test_entry_type_path(): def test_parse_entry_type_path(): expected = { - "project": "abalone", - "location": "squid", - "entry_type": "clam", + "project": "mussel", + "location": "winkle", + "entry_type": "nautilus", } path = CatalogServiceClient.entry_type_path(**expected) @@ -23851,10 +26180,36 @@ def test_parse_entry_type_path(): assert expected == actual +def test_glossary_path(): + project = "scallop" + location = "abalone" + glossary = "squid" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + actual = CatalogServiceClient.glossary_path(project, location, glossary) + assert expected == actual + + +def test_parse_glossary_path(): + expected = { + "project": "clam", + "location": "whelk", + "glossary": "octopus", + } + path = CatalogServiceClient.glossary_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_glossary_path(path) + assert expected == actual + + def test_metadata_job_path(): - project = "whelk" - location = "octopus" - metadataJob = "oyster" + project = "oyster" + location = "nudibranch" + metadataJob = "cuttlefish" expected = ( "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( project=project, @@ -23868,9 +26223,9 @@ def test_metadata_job_path(): def test_parse_metadata_job_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "metadataJob": "mussel", + "project": "mussel", + "location": "winkle", + "metadataJob": "nautilus", } path = CatalogServiceClient.metadata_job_path(**expected) @@ -23880,7 +26235,7 @@ def test_parse_metadata_job_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -23890,7 +26245,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "abalone", } path = CatalogServiceClient.common_billing_account_path(**expected) @@ -23900,7 +26255,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -23910,7 +26265,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "clam", } path = CatalogServiceClient.common_folder_path(**expected) @@ -23920,7 +26275,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -23930,7 +26285,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "octopus", } path = CatalogServiceClient.common_organization_path(**expected) @@ -23940,7 +26295,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "oyster" expected = "projects/{project}".format( project=project, 
) @@ -23950,7 +26305,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nudibranch", } path = CatalogServiceClient.common_project_path(**expected) @@ -23960,8 +26315,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -23972,8 +26327,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "winkle", + "location": "nautilus", } path = CatalogServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index eb5638b911e3..ab973c7dc066 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -80,6 +80,7 @@ data_profile, data_quality, datascans, + datascans_common, processing, resources, service, @@ -7119,6 +7120,7 @@ def test_create_data_scan_rest_call_success(request_type): "job_end_trigger": {}, }, }, + "catalog_publishing_enabled": True, }, "data_profile_spec": { "sampling_percent": 0.17070000000000002, @@ -7136,6 +7138,7 @@ def test_create_data_scan_rest_call_success(request_type): "table_type": 1, "connection": "connection_value", "location": "location_value", + "project": "project_value", }, "storage_config": { "include_patterns": [ @@ -7165,7 +7168,14 @@ def test_create_data_scan_rest_call_success(request_type): "dimensions": [ {"dimension": {"name": "name_value"}, "passed": True, "score": 0.54} ], - "columns": [{"column": "column_value", "score": 0.54}], + "columns": [ + { + "column": "column_value", + "score": 0.54, + "passed": True, + "dimensions": {}, + } + ], "rules": [ { "rule": {}, @@ -7189,6 +7199,7 @@ def test_create_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {"state": 1}, }, "data_profile_result": { "row_count": 992, @@ -7511,6 +7522,7 @@ def test_update_data_scan_rest_call_success(request_type): "job_end_trigger": {}, }, }, + "catalog_publishing_enabled": True, }, "data_profile_spec": { "sampling_percent": 0.17070000000000002, @@ -7528,6 +7540,7 @@ def test_update_data_scan_rest_call_success(request_type): "table_type": 1, "connection": "connection_value", "location": "location_value", + "project": "project_value", }, "storage_config": { "include_patterns": [ @@ -7557,7 +7570,14 @@ def test_update_data_scan_rest_call_success(request_type): "dimensions": [ {"dimension": {"name": "name_value"}, "passed": True, "score": 0.54} ], - "columns": [{"column": "column_value", "score": 0.54}], + "columns": [ + { + "column": "column_value", + "score": 0.54, + "passed": True, + "dimensions": {}, + } + ], "rules": [ { "rule": {}, @@ -7581,6 +7601,7 @@ def test_update_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {"state": 1}, }, "data_profile_result": { "row_count": 992,
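
For context on what the generated tests above exercise: the sketch below shows how the new `GetEntryLink` and `DeleteEntryLink` surface on `CatalogService` might be called from application code. This is illustrative only, not part of the patch; it assumes the generated request types are re-exported at the `google.cloud.dataplex_v1` package level (the usual GAPIC convention), and the resource name is a placeholder.

    # Minimal sketch of the new EntryLink calls (assumed re-exports,
    # placeholder resource names).
    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    # Fetch an EntryLink by its full resource name; the tests above show the
    # "projects/.../locations/.../entryGroups/.../entryLinks/..." pattern.
    entry_link = client.get_entry_link(
        request=dataplex_v1.GetEntryLinkRequest(
            name=(
                "projects/my-project/locations/us-central1/"
                "entryGroups/my-group/entryLinks/my-link"
            ),
        )
    )
    print(entry_link.name, entry_link.entry_link_type)

    # Per the interceptor tests above, DeleteEntryLink returns the deleted
    # EntryLink itself rather than a long-running Operation.
    deleted = client.delete_entry_link(
        request=dataplex_v1.DeleteEntryLinkRequest(name=entry_link.name)
    )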